From 737adb2d40a32bb27d46ae97736e5ba638252f9d Mon Sep 17 00:00:00 2001 From: swathipil <76007337+swathipil@users.noreply.github.com> Date: Fri, 20 Dec 2024 18:05:52 -0800 Subject: [PATCH] [EventHub] update checkpointstoreblob vendored storage for lint/typing changes (#38757) * update changelog * update sync blob vendor * update async ckpt blob vendor * update async ckpt blob changelog * added vendored storage deps to setup.py * cp over latest storage updates --- .vscode/cspell.json | 2 + .../CHANGELOG.md | 5 + .../_vendor/__init__.py | 2 +- .../_vendor/storage/__init__.py | 2 +- .../_vendor/storage/blob/__init__.py | 82 +- .../_vendor/storage/blob/_blob_client.py | 2495 +++---- .../storage/blob/_blob_client_helpers.py | 1246 ++++ .../storage/blob/_blob_service_client.py | 373 +- .../blob/_blob_service_client_helpers.py | 27 + .../_vendor/storage/blob/_container_client.py | 1022 +-- .../storage/blob/_container_client_helpers.py | 266 + .../_vendor/storage/blob/_deserialize.py | 160 +- .../_vendor/storage/blob/_download.py | 887 ++- .../_vendor/storage/blob/_encryption.py | 1127 +++ .../storage/blob/_generated/__init__.py | 15 +- .../blob/_generated/_azure_blob_storage.py | 130 +- .../storage/blob/_generated/_configuration.py | 55 +- .../_vendor/storage/blob/_generated/_patch.py | 33 + .../storage/blob/_generated/_serialization.py | 2115 ++++++ .../storage/blob/_generated/aio/__init__.py | 15 +- .../_generated/aio/_azure_blob_storage.py | 116 +- .../blob/_generated/aio/_configuration.py | 49 +- .../storage/blob/_generated/aio/_patch.py | 33 + .../_generated/aio/operations/__init__.py | 20 +- .../aio/operations/_append_blob_operations.py | 957 +-- .../aio/operations/_blob_operations.py | 4403 ++++++------ .../aio/operations/_block_blob_operations.py | 1479 ++-- .../aio/operations/_container_operations.py | 2204 +++--- .../aio/operations/_page_blob_operations.py | 1862 ++--- .../blob/_generated/aio/operations/_patch.py | 26 + .../aio/operations/_service_operations.py | 960 +-- .../blob/_generated/models/__init__.py | 372 +- .../models/_azure_blob_storage_enums.py | 164 +- .../blob/_generated/models/_models_py3.py | 3053 +++++---- .../storage/blob/_generated/models/_patch.py | 26 + .../blob/_generated/operations/__init__.py | 20 +- .../operations/_append_blob_operations.py | 1375 ++-- .../_generated/operations/_blob_operations.py | 6057 ++++++++++------- .../operations/_block_blob_operations.py | 2181 ++++-- .../operations/_container_operations.py | 3170 ++++++--- .../operations/_page_blob_operations.py | 2729 +++++--- .../blob/_generated/operations/_patch.py | 26 + .../operations/_service_operations.py | 1324 ++-- .../_vendor/storage/blob/_lease.py | 136 +- .../storage/blob/_list_blobs_helper.py | 302 +- .../_vendor/storage/blob/_models.py | 1218 ++-- .../storage/blob/_quick_query_helper.py | 70 +- .../_vendor/storage/blob/_serialize.py | 130 +- .../_vendor/storage/blob/_shared/__init__.py | 10 +- .../storage/blob/_shared/authentication.py | 145 +- .../storage/blob/_shared/avro/avro_io.py | 131 +- .../blob/_shared/avro/avro_io_async.py | 129 +- .../storage/blob/_shared/avro/datafile.py | 23 +- .../blob/_shared/avro/datafile_async.py | 19 +- .../storage/blob/_shared/avro/schema.py | 210 +- .../storage/blob/_shared/base_client.py | 291 +- .../storage/blob/_shared/base_client_async.py | 185 +- .../_vendor/storage/blob/_shared/constants.py | 18 +- .../_vendor/storage/blob/_shared/models.py | 493 +- .../_vendor/storage/blob/_shared/parser.py | 46 +- .../_vendor/storage/blob/_shared/policies.py | 
298 +- .../storage/blob/_shared/policies_async.py | 138 +- .../storage/blob/_shared/request_handlers.py | 175 +- .../storage/blob/_shared/response_handlers.py | 116 +- .../blob/_shared/shared_access_signature.py | 52 +- .../_vendor/storage/blob/_shared/uploads.py | 160 +- .../storage/blob/_shared/uploads_async.py | 196 +- .../storage/blob/_shared_access_signature.py | 297 +- .../_vendor/storage/blob/_upload_helpers.py | 231 +- .../_vendor/storage/blob/_version.py | 2 +- .../_vendor/storage/blob/aio/__init__.py | 59 +- .../storage/blob/aio/_blob_client_async.py | 1384 +++- .../blob/aio/_blob_service_client_async.py | 400 +- .../blob/aio/_container_client_async.py | 819 ++- .../storage/blob/aio/_download_async.py | 864 ++- .../storage/blob/aio/_encryption_async.py | 72 + .../_vendor/storage/blob/aio/_lease_async.py | 143 +- .../storage/blob/aio/_list_blobs_helper.py | 222 +- .../_vendor/storage/blob/aio/_models.py | 140 +- .../storage/blob/aio/_upload_helpers.py | 242 +- .../_vendor/storage/blob/py.typed | 0 .../setup.py | 5 +- .../CHANGELOG.md | 5 + .../checkpointstoreblob/_vendor/__init__.py | 2 +- .../_vendor/storage/__init__.py | 2 +- .../_vendor/storage/blob/__init__.py | 82 +- .../_vendor/storage/blob/_blob_client.py | 2495 +++---- .../storage/blob/_blob_client_helpers.py | 1246 ++++ .../storage/blob/_blob_service_client.py | 373 +- .../blob/_blob_service_client_helpers.py | 27 + .../_vendor/storage/blob/_container_client.py | 1020 +-- .../storage/blob/_container_client_helpers.py | 266 + .../_vendor/storage/blob/_deserialize.py | 160 +- .../_vendor/storage/blob/_download.py | 887 ++- .../_vendor/storage/blob/_encryption.py | 1127 +++ .../storage/blob/_generated/__init__.py | 15 +- .../blob/_generated/_azure_blob_storage.py | 130 +- .../storage/blob/_generated/_configuration.py | 55 +- .../_vendor/storage/blob/_generated/_patch.py | 33 + .../storage/blob/_generated/_serialization.py | 2115 ++++++ .../storage/blob/_generated/aio/__init__.py | 15 +- .../_generated/aio/_azure_blob_storage.py | 116 +- .../blob/_generated/aio/_configuration.py | 49 +- .../storage/blob/_generated/aio/_patch.py | 33 + .../_generated/aio/operations/__init__.py | 20 +- .../aio/operations/_append_blob_operations.py | 957 +-- .../aio/operations/_blob_operations.py | 4403 ++++++------ .../aio/operations/_block_blob_operations.py | 1479 ++-- .../aio/operations/_container_operations.py | 2204 +++--- .../aio/operations/_directory_operations.py | 739 -- .../aio/operations/_page_blob_operations.py | 1862 ++--- .../blob/_generated/aio/operations/_patch.py | 26 + .../aio/operations/_service_operations.py | 960 +-- .../blob/_generated/models/__init__.py | 372 +- .../models/_azure_blob_storage_enums.py | 164 +- .../storage/blob/_generated/models/_models.py | 2024 ------ .../blob/_generated/models/_models_py3.py | 3053 +++++---- .../storage/blob/_generated/models/_patch.py | 26 + .../blob/_generated/operations/__init__.py | 20 +- .../operations/_append_blob_operations.py | 1375 ++-- .../_generated/operations/_blob_operations.py | 6057 ++++++++++------- .../operations/_block_blob_operations.py | 2181 ++++-- .../operations/_container_operations.py | 3170 ++++++--- .../operations/_directory_operations.py | 748 -- .../operations/_page_blob_operations.py | 2729 +++++--- .../blob/_generated/operations/_patch.py | 26 + .../operations/_service_operations.py | 1324 ++-- .../_vendor/storage/blob/_lease.py | 136 +- .../storage/blob/_list_blobs_helper.py | 302 +- .../_vendor/storage/blob/_models.py | 1218 ++-- 
.../storage/blob/_quick_query_helper.py | 70 +- .../_vendor/storage/blob/_serialize.py | 130 +- .../_vendor/storage/blob/_shared/__init__.py | 10 +- .../storage/blob/_shared/authentication.py | 145 +- .../storage/blob/_shared/avro/avro_io.py | 131 +- .../blob/_shared/avro/avro_io_async.py | 129 +- .../storage/blob/_shared/avro/datafile.py | 23 +- .../blob/_shared/avro/datafile_async.py | 19 +- .../storage/blob/_shared/avro/schema.py | 210 +- .../storage/blob/_shared/base_client.py | 291 +- .../storage/blob/_shared/base_client_async.py | 185 +- .../_vendor/storage/blob/_shared/constants.py | 18 +- .../storage/blob/_shared/encryption.py | 542 -- .../_vendor/storage/blob/_shared/models.py | 493 +- .../_vendor/storage/blob/_shared/parser.py | 53 +- .../_vendor/storage/blob/_shared/policies.py | 294 +- .../storage/blob/_shared/policies_async.py | 138 +- .../storage/blob/_shared/request_handlers.py | 173 +- .../storage/blob/_shared/response_handlers.py | 116 +- .../blob/_shared/shared_access_signature.py | 54 +- .../_vendor/storage/blob/_shared/uploads.py | 160 +- .../storage/blob/_shared/uploads_async.py | 196 +- .../storage/blob/_shared_access_signature.py | 297 +- .../_vendor/storage/blob/_upload_helpers.py | 231 +- .../_vendor/storage/blob/_version.py | 2 +- .../_vendor/storage/blob/aio/__init__.py | 59 +- .../storage/blob/aio/_blob_client_async.py | 1384 +++- .../blob/aio/_blob_service_client_async.py | 400 +- .../blob/aio/_container_client_async.py | 817 ++- .../storage/blob/aio/_download_async.py | 864 ++- .../storage/blob/aio/_encryption_async.py | 72 + .../_vendor/storage/blob/aio/_lease_async.py | 143 +- .../storage/blob/aio/_list_blobs_helper.py | 222 +- .../_vendor/storage/blob/aio/_models.py | 140 +- .../storage/blob/aio/_upload_helpers.py | 242 +- .../_vendor/storage/blob/py.typed | 0 .../setup.py | 5 +- 167 files changed, 67067 insertions(+), 41455 deletions(-) create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client_helpers.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client_helpers.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client_helpers.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_encryption.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_patch.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_serialization.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_patch.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_patch.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_patch.py create mode 100644 
sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_patch.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_encryption_async.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/py.typed create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client_helpers.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client_helpers.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client_helpers.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_encryption.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_patch.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_serialization.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_patch.py delete mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_directory_operations.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_patch.py delete mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_patch.py delete mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_directory_operations.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_patch.py delete mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/encryption.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_encryption_async.py create mode 100644 sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/py.typed diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 9c4103e3143a..d526d37d14dc 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -98,6 +98,8 @@ "sdk/translation/azure-ai-translation-document/doc/**", "sdk/translation/azure-ai-translation-document/tests/glossaries-valid.csv", "sdk/storage/azure-storage-blob/**", + "sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/**", + 
"sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/**", "sdk/storage/azure-storage-extensions/**", "sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/**", "sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_virtual_cluster_utils/_restclient/**", diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/CHANGELOG.md b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/CHANGELOG.md index be378c7e4f59..cdcdfde1813a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/CHANGELOG.md +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/CHANGELOG.md @@ -11,6 +11,11 @@ This version and all future versions will require Python 3.7+. Python 2.7 and 3. - Fixed a bug with `BlobCheckpointStore.claim_ownership` mutating the `ownership_list` argument to no longer mutate the argument. - Updated `azure-core` dependecy to 1.20.1 to fix `cchardet` ImportError. +### Other Changes + +- Updated vendor azure-storage-blob dependency to v12.24.0. + - Fixed typing/linting issues and other bugs. See azure-storage-blob CHANGELOG.md for more info. + ## 1.1.4 (2021-04-07) This version and all future versions will require Python 2.7 or Python 3.6+, Python 3.5 is no longer supported. diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/__init__.py index 59cb70146572..0d1f7edf5dc6 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: str +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/__init__.py index 59cb70146572..0d1f7edf5dc6 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: str +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/__init__.py index 9164961ea10a..2386595611bd 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/__init__.py @@ -3,9 +3,11 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only + import os -from typing import Union, Iterable, AnyStr, IO, Any, Dict # pylint: disable=unused-import +from typing import Any, AnyStr, cast, Dict, IO, Iterable, Optional, Union, TYPE_CHECKING from ._version import VERSION from ._blob_client import BlobClient from ._container_client import ContainerClient @@ -16,21 +18,21 @@ from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas from ._shared.policies import ExponentialRetry, LinearRetry from ._shared.response_handlers import PartialBatchErrorException -from ._shared.models import( +from ._shared.models import ( LocationMode, ResourceTypes, AccountSasPermissions, StorageErrorCode, - UserDelegationKey -) -from ._generated.models import ( - RehydratePriority + UserDelegationKey, + Services ) +from ._generated.models import RehydratePriority from ._models import ( BlobType, BlockState, StandardBlobTier, PremiumPageBlobTier, + BlobImmutabilityPolicyMode, SequenceNumberAction, PublicAccess, BlobAnalyticsLogging, @@ -54,22 +56,27 @@ BlobQueryError, DelimitedJsonDialect, DelimitedTextDialect, + QuickQueryDialect, ArrowDialect, ArrowType, ObjectReplicationPolicy, - ObjectReplicationRule + ObjectReplicationRule, + ImmutabilityPolicy, ) from ._list_blobs_helper import BlobPrefix +if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + __version__ = VERSION def upload_blob_to_url( - blob_url, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - credential=None, # type: Any - **kwargs): - # type: (...) -> Dict[str, Any] + blob_url: str, + data: Union[Iterable[AnyStr], IO[AnyStr]], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> Dict[str, Any]: """Upload data to a given URL The data will be uploaded as a block blob. @@ -82,10 +89,17 @@ def upload_blob_to_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob_to_url will overwrite any existing data. 
If set to False, the @@ -112,21 +126,26 @@ def upload_blob_to_url( :rtype: dict(str, Any) """ with BlobClient.from_blob_url(blob_url, credential=credential) as client: - return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs) + return cast(BlobClient, client).upload_blob(data=data, blob_type=BlobType.BLOCKBLOB, **kwargs) -def _download_to_stream(client, handle, **kwargs): - """Download data to specified open file-handle.""" +def _download_to_stream(client: BlobClient, handle: IO[bytes], **kwargs: Any) -> None: + """ + Download data to specified open file-handle. + + :param BlobClient client: The BlobClient to download with. + :param Stream handle: A Stream to download the data into. + """ stream = client.download_blob(**kwargs) stream.readinto(handle) def download_blob_from_url( - blob_url, # type: str - output, # type: str - credential=None, # type: Any - **kwargs): - # type: (...) -> None + blob_url: str, + output: Union[str, IO[bytes]], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> None: """Download the contents of a blob to a local file or stream. :param str blob_url: @@ -138,10 +157,17 @@ def download_blob_from_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token or the blob is public. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the local file should be overwritten if it already exists. The default value is `False` - in which case a ValueError will be raised if the file already exists. 
If set to @@ -169,10 +195,10 @@ def download_blob_from_url( overwrite = kwargs.pop('overwrite', False) with BlobClient.from_blob_url(blob_url, credential=credential) as client: if hasattr(output, 'write'): - _download_to_stream(client, output, **kwargs) + _download_to_stream(client, cast(IO[bytes], output), **kwargs) else: if not overwrite and os.path.isfile(output): - raise ValueError("The file '{}' already exists.".format(output)) + raise ValueError(f"The file '{output}' already exists.") with open(output, 'wb') as file_handle: _download_to_stream(client, file_handle, **kwargs) @@ -194,6 +220,8 @@ def download_blob_from_url( 'StandardBlobTier', 'PremiumPageBlobTier', 'SequenceNumberAction', + 'BlobImmutabilityPolicyMode', + 'ImmutabilityPolicy', 'PublicAccess', 'BlobAnalyticsLogging', 'Metrics', @@ -210,6 +238,7 @@ def download_blob_from_url( 'BlobBlock', 'PageRange', 'AccessPolicy', + 'QuickQueryDialect', 'ContainerSasPermissions', 'BlobSasPermissions', 'ResourceTypes', @@ -229,5 +258,6 @@ def download_blob_from_url( 'ArrowType', 'BlobQueryReader', 'ObjectReplicationPolicy', - 'ObjectReplicationRule' + 'ObjectReplicationRule', + 'Services', ] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client.py index f3d2d16564a3..90049ff88e32 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client.py @@ -3,78 +3,99 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines,no-self-use +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only + +import warnings +from datetime import datetime from functools import partial -from io import BytesIO -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, +from typing import ( + Any, AnyStr, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, TYPE_CHECKING ) -try: - from urllib.parse import urlparse, quote, unquote -except ImportError: - from urlparse import urlparse # type: ignore - from urllib2 import quote, unquote # type: ignore +from typing_extensions import Self -import six +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError +from azure.core.paging import ItemPaged +from azure.core.pipeline import Pipeline from azure.core.tracing.decorator import distributed_trace -from azure.core.exceptions import ResourceNotFoundError, HttpResponseError - -from ._shared import encode_base64 -from ._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query -from ._shared.encryption import generate_blob_encryption_data -from ._shared.uploads import IterStreamer -from ._shared.request_handlers import ( - add_metadata_headers, get_length, read_length, - validate_and_format_range_headers) -from ._shared.response_handlers import return_response_headers, process_storage_error, return_headers_and_deserialized +from ._blob_client_helpers import ( + _abort_copy_options, + _append_block_from_url_options, + _append_block_options, + _clear_page_options, + _commit_block_list_options, + _create_append_blob_options, + _create_page_blob_options, + _create_snapshot_options, + _delete_blob_options, + _download_blob_options, + _format_url, + _from_blob_url, + _get_blob_tags_options, + _get_block_list_result, + _get_page_ranges_options, + _parse_url, + _quick_query_options, + _resize_blob_options, + _seal_append_blob_options, + _set_blob_metadata_options, + _set_blob_tags_options, + _set_http_headers_options, + _set_sequence_number_options, + _stage_block_from_url_options, + _stage_block_options, + _start_copy_from_url_options, + _upload_blob_from_url_options, + _upload_blob_options, + _upload_page_options, + _upload_pages_from_url_options +) +from ._deserialize import ( + deserialize_blob_properties, + deserialize_pipeline_response_into_cls, + get_page_ranges_result, + parse_tags +) +from ._download import StorageStreamDownloader +from ._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION from ._generated import AzureBlobStorage -from ._generated.models import ( # pylint: disable=unused-import - DeleteSnapshotsOptionType, - BlobHTTPHeaders, - BlockLookupList, - AppendPositionAccessConditions, - SequenceNumberAccessConditions, - QueryRequest, - CpkInfo) +from ._generated.models import CpkInfo +from ._lease import BlobLeaseClient +from ._models import BlobBlock, BlobProperties, BlobQueryError, BlobType, PageRange, PageRangePaged +from ._quick_query_helper import BlobQueryReader +from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper +from ._shared.response_handlers import process_storage_error, return_response_headers from ._serialize import ( - get_modify_conditions, - get_source_conditions, - get_cpk_scope_info, + get_access_conditions, get_api_version, - serialize_blob_tags_header, - 
serialize_blob_tags, - serialize_query_format, get_access_conditions + get_modify_conditions, + get_version_id ) -from ._deserialize import get_page_ranges_result, deserialize_blob_properties, deserialize_blob_stream, parse_tags, \ - deserialize_pipeline_response_into_cls -from ._quick_query_helper import BlobQueryReader from ._upload_helpers import ( - upload_block_blob, upload_append_blob, - upload_page_blob, _any_conditions) -from ._models import BlobType, BlobBlock, BlobProperties, BlobQueryError -from ._download import StorageStreamDownloader -from ._lease import BlobLeaseClient + upload_block_blob, + upload_page_blob +) if TYPE_CHECKING: - from datetime import datetime - from ._generated.models import BlockList - from ._models import ( # pylint: disable=unused-import + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from azure.storage.blob import ContainerClient + from ._models import ( ContentSettings, + ImmutabilityPolicy, PremiumPageBlobTier, - StandardBlobTier, - SequenceNumberAction + SequenceNumberAction, + StandardBlobTier ) -_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( - 'The require_encryption flag is set, but encryption is not supported' - ' for this method.') - -class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-methods +class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): # pylint: disable=too-many-public-methods """A client to interact with a specific blob, although that blob may not yet exist. + For more optional configuration, please click + `here `__. + :param str account_url: The URI to the storage account. In order to create a client given the full URI to the blob, use the :func:`from_blob_url` classmethod. @@ -89,13 +110,15 @@ class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-m :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -114,6 +137,11 @@ class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-m the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. 
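The download-tuning keywords documented above are ordinary constructor arguments. A minimal sketch of setting them at construction time, assuming the public azure-storage-blob package (whose API this vendored copy mirrors); the account URL, container, blob, and SAS token are placeholder values:

    from azure.storage.blob import BlobClient

    # Values shown match the documented defaults; no network call happens here.
    blob = BlobClient(
        account_url="https://myaccount.blob.core.windows.net",
        container_name="eventhub-checkpoints",
        blob_name="ownership/partition-0",
        credential="<sas-token>",
        max_single_get_size=32 * 1024 * 1024,  # first GET fetches up to 32MB
        max_chunk_get_size=4 * 1024 * 1024,    # remaining ranges download in 4MB chunks
    )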
+ :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -132,69 +160,52 @@ class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-m :caption: Creating the BlobClient from a SAS URL to a blob. """ def __init__( - self, account_url, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[Union[str, Dict[str, Any]]] - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Account URL must be a string.") - parsed_url = urlparse(account_url.rstrip('/')) - - if not (container_name and blob_name): - raise ValueError("Please specify a container name and blob name.") - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(account_url)) - - path_snapshot, sas_token = parse_query(parsed_url.query) - + self, account_url: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: + parsed_url, sas_token, path_snapshot = _parse_url( + account_url=account_url, + container_name=container_name, + blob_name=blob_name) self.container_name = container_name self.blob_name = blob_name - try: - self.snapshot = snapshot.snapshot # type: ignore - except AttributeError: - try: - self.snapshot = snapshot['snapshot'] # type: ignore - except TypeError: - self.snapshot = snapshot or path_snapshot + if snapshot is not None and hasattr(snapshot, 'snapshot'): + self.snapshot = snapshot.snapshot + elif isinstance(snapshot, dict): + self.snapshot = snapshot['snapshot'] + else: + self.snapshot = snapshot or path_snapshot + self.version_id = kwargs.pop('version_id', None) + + # This parameter is used for the hierarchy traversal. Give precedence to credential. 
+ self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - - def _format_url(self, hostname): - container_name = self.container_name - if isinstance(container_name, six.text_type): - container_name = container_name.encode('UTF-8') - return "{}://{}/{}/{}{}".format( - self.scheme, - hostname, - quote(container_name), - quote(self.blob_name, safe='~/'), - self._query_str) - - def _encode_source_url(self, source_url): - parsed_source_url = urlparse(source_url) - source_scheme = parsed_source_url.scheme - source_hostname = parsed_source_url.netloc.rstrip('/') - source_path = unquote(parsed_source_url.path) - source_query = parsed_source_url.query - result = ["{}://{}{}".format(source_scheme, source_hostname, quote(source_path, safe='~/'))] - if source_query: - result.append(source_query) - return '?'.join(result) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) + + def _format_url(self, hostname: str) -> str: + return _format_url( + container_name=self.container_name, + scheme=self.scheme, + blob_name=self.blob_name, + query_str=self._query_str, + hostname=hostname + ) @classmethod - def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs): - # type: (str, Optional[Any], Optional[Union[str, Dict[str, Any]]], Any) -> BlobClient + def from_blob_url( + cls, blob_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> Self: """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. :param str blob_url: @@ -205,58 +216,30 @@ def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs): The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. 
+ :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :param str snapshot: The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`create_snapshot`. If specified, this will override the snapshot in the url. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient """ - try: - if not blob_url.lower().startswith('http'): - blob_url = "https://" + blob_url - except AttributeError: - raise ValueError("Blob URL must be a string.") - parsed_url = urlparse(blob_url.rstrip('/')) - - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(blob_url)) - - account_path = "" - if ".core." in parsed_url.netloc: - # .core. is indicating non-customized url. Blob name with directory info can also be parsed. - path_blob = parsed_url.path.lstrip('/').split('/', 1) - elif "localhost" in parsed_url.netloc or "127.0.0.1" in parsed_url.netloc: - path_blob = parsed_url.path.lstrip('/').split('/', 2) - account_path += '/' + path_blob[0] - else: - # for customized url. blob name that has directory info cannot be parsed. - path_blob = parsed_url.path.lstrip('/').split('/') - if len(path_blob) > 2: - account_path = "/" + "/".join(path_blob[:-2]) - account_url = "{}://{}{}?{}".format( - parsed_url.scheme, - parsed_url.netloc.rstrip('/'), - account_path, - parsed_url.query) - container_name, blob_name = unquote(path_blob[-2]), unquote(path_blob[-1]) - if not container_name or not blob_name: - raise ValueError("Invalid URL. Provide a blob_url with a valid blob and container name.") - - path_snapshot, _ = parse_query(parsed_url.query) - if snapshot: - try: - path_snapshot = snapshot.snapshot # type: ignore - except AttributeError: - try: - path_snapshot = snapshot['snapshot'] # type: ignore - except TypeError: - path_snapshot = snapshot - + account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot) return cls( account_url, container_name=container_name, blob_name=blob_name, snapshot=path_snapshot, credential=credential, **kwargs @@ -264,13 +247,13 @@ def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs): @classmethod def from_connection_string( - cls, conn_str, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[str] - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): # type: (...) -> BlobClient + cls, conn_str: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create BlobClient from a Connection String. :param str conn_str: @@ -286,9 +269,21 @@ def from_connection_string( The credentials with which to authenticate. 
This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient @@ -310,8 +305,7 @@ def from_connection_string( ) @distributed_trace - def get_account_information(self, **kwargs): - # type: (**Any) -> Dict[str, str] + def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account in which the blob resides. The information can also be retrieved if the user has a SAS to a container or blob. @@ -321,141 +315,17 @@ def get_account_information(self, **kwargs): :rtype: dict(str, str) """ try: - return self._client.blob.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + return cast(Dict[str, str], self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) except HttpResponseError as error: process_storage_error(error) - def _upload_blob_options( # pylint:disable=too-many-statements - self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption and not self.key_encryption_key: - raise ValueError("Encryption required but no key was provided.") - encryption_options = { - 'required': self.require_encryption, - 'key': self.key_encryption_key, - 'resolver': self.key_resolver_function, - } - if self.key_encryption_key is not None: - cek, iv, encryption_data = generate_blob_encryption_data(self.key_encryption_key) - encryption_options['cek'] = cek - encryption_options['vector'] = iv - encryption_options['data'] = encryption_data - - encoding = kwargs.pop('encoding', 'UTF-8') - if isinstance(data, six.text_type): - data = data.encode(encoding) # type: ignore - if length is None: - length = get_length(data) - if isinstance(data, bytes): - data = data[:length] - - if isinstance(data, bytes): - stream = BytesIO(data) - elif hasattr(data, 'read'): - stream = data - elif hasattr(data, '__iter__'): - stream = IterStreamer(data, encoding=encoding) - else: - raise TypeError("Unsupported data type: {}".format(type(data))) - - validate_content = kwargs.pop('validate_content', False) - content_settings = kwargs.pop('content_settings', None) - overwrite = kwargs.pop('overwrite', False) - max_concurrency = kwargs.pop('max_concurrency', 1) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - kwargs['cpk_info'] = cpk_info - - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) - kwargs['modified_access_conditions'] = get_modify_conditions(kwargs) - kwargs['cpk_scope_info'] = get_cpk_scope_info(kwargs) - if content_settings: - kwargs['blob_headers'] = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - kwargs['blob_tags_string'] = serialize_blob_tags_header(kwargs.pop('tags', None)) - kwargs['stream'] = stream - kwargs['length'] = length - kwargs['overwrite'] = overwrite - kwargs['headers'] = headers - kwargs['validate_content'] = validate_content - kwargs['blob_settings'] = self._config - kwargs['max_concurrency'] = max_concurrency - kwargs['encryption_options'] = encryption_options - if blob_type == BlobType.BlockBlob: - kwargs['client'] = self._client.block_blob - kwargs['data'] = data - elif blob_type == BlobType.PageBlob: - kwargs['client'] = self._client.page_blob - elif blob_type == BlobType.AppendBlob: - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - kwargs['client'] = self._client.append_blob - else: - raise ValueError("Unsupported BlobType: {}".format(blob_type)) - return kwargs - - def _upload_blob_from_url_options(self, source_url, **kwargs): - # type: (...) 
-> Dict[str, Any] - tier = kwargs.pop('standard_blob_tier', None) - overwrite = kwargs.pop('overwrite', False) - content_settings = kwargs.pop('content_settings', None) - if content_settings: - kwargs['blob_http_headers'] = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'content_length': 0, - 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), - 'source_content_md5': kwargs.pop('source_content_md5', None), - 'copy_source': source_url, - 'modified_access_conditions': get_modify_conditions(kwargs), - 'blob_tags_string': serialize_blob_tags_header(kwargs.pop('tags', None)), - 'cls': return_response_headers, - 'lease_access_conditions': get_access_conditions(kwargs.pop('destination_lease', None)), - 'tier': tier.value if tier else None, - 'source_modified_access_conditions': get_source_conditions(kwargs), - 'cpk_info': cpk_info, - 'cpk_scope_info': get_cpk_scope_info(kwargs) - } - options.update(kwargs) - if not overwrite and not _any_conditions(**options): # pylint: disable=protected-access - options['modified_access_conditions'].if_none_match = '*' - return options - @distributed_trace - def upload_blob_from_url(self, source_url, **kwargs): - # type: (str, Any) -> Dict[str, Any] + def upload_blob_from_url( + self, source_url: str, + *, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """ Creates a new Block Blob where the content of the blob is read from a given URL. The content of an existing blob is overwritten with the new blob. @@ -463,15 +333,17 @@ def upload_blob_from_url(self, source_url, **kwargs): :param str source_url: A URL of up to 2 KB in length that specifies a file or blob. The value should be URL-encoded as it would appear in a request URI. - If the source is in another account, the source must either be public - or must be authenticated via a shared access signature. If the source - is public, no authentication is required. + The source must either be public or must be authenticated via a shared + access signature as part of the url or using the source_authorization keyword. + If the source is public, no authentication is required. Examples: https://myaccount.blob.core.windows.net/mycontainer/myblob https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :keyword dict(str, str) metadata: + Name-value pairs associated with the blob as metadata. :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. @@ -482,7 +354,7 @@ def upload_blob_from_url(self, source_url, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) :paramtype tags: dict(str, str) :keyword bytearray source_content_md5: Specify the md5 that is used to verify the integrity of the source bytes. @@ -526,7 +398,11 @@ def upload_blob_from_url(self, source_url, **kwargs): valid, the operation fails with status code 412 (Precondition Failed). :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :keyword ~azure.storage.blob.ContentSettings content_settings: ContentSettings object used to set blob properties. Used to set content type, encoding, language, disposition, md5, and cache control. @@ -543,27 +419,35 @@ def upload_blob_from_url(self, source_url, **kwargs): :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Blob-updated property Dict (Etag and last modified) + :rtype: Dict[str, Any] """ - options = self._upload_blob_from_url_options( - source_url=self._encode_source_url(source_url), + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_from_url_options( + source_url=source_url, + metadata=metadata, **kwargs) try: - return self._client.block_blob.put_blob_from_url(**options) + return cast(Dict[str, Any], self._client.block_blob.put_blob_from_url(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def upload_blob( # pylint: disable=too-many-locals - self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Any + def upload_blob( + self, data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], + blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, + length: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new blob from a data source with automatic chunking. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -577,7 +461,7 @@ def upload_blob( # pylint: disable=too-many-locals The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters.
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -635,6 +519,20 @@ def upload_blob( # pylint: disable=too-many-locals :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword int maxsize_condition: Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob @@ -642,8 +540,9 @@ def upload_blob( # pylint: disable=too-many-locals value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). :keyword int max_concurrency: - Maximum number of parallel connections to use when the blob size exceeds - 64MB. + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: Encrypts the data on the service-side with the given key. Use of customer-provided keys must be done over HTTPS. @@ -659,12 +558,20 @@ def upload_blob( # pylint: disable=too-many-locals :keyword str encoding: Defaults to UTF-8. + :keyword progress_hook: + A callback to track the progress of a long running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], None] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. - :returns: Blob-updated property dict (Etag and last modified) - :rtype: dict[str, Any] + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. + :returns: Blob-updated property Dict (Etag and last modified) + :rtype: Dict[str, Any] .. admonition:: Example: .. literalinclude:: ../samples/blob_samples_hello_world.py :start-after: [START upload_a_blob] :end-before: [END upload_a_blob] :language: python :dedent: 12 :caption: Upload a blob to the container.
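Most calls touch only a few of the keywords documented above. A hedged sketch of a chunked upload wiring up overwrite, metadata, max_concurrency, and the new progress_hook; the connection string, container, blob, and file names are illustrative:

    from typing import Optional
    from azure.storage.blob import BlobClient, BlobType

    def on_progress(current: int, total: Optional[int]) -> None:
        # Documented progress_hook signature: total may be None for unsized streams.
        print(f"uploaded {current} of {total if total is not None else '?'} bytes")

    client = BlobClient.from_connection_string(
        "<connection-string>", container_name="uploads", blob_name="report.csv")
    with open("report.csv", "rb") as data:
        result = client.upload_blob(
            data,
            blob_type=BlobType.BLOCKBLOB,  # enum-member spelling used by this version
            overwrite=True,
            metadata={"origin": "nightly-job"},
            max_concurrency=4,             # parallel chunk transfers
            progress_hook=on_progress,
        )
    # Blob-updated property dict, per the :returns: documentation above
    print(result["etag"], result["last_modified"])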
""" - options = self._upload_blob_options( - data, + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_options( + data=data, blob_type=blob_type, length=length, metadata=metadata, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) if blob_type == BlobType.BlockBlob: return upload_block_blob(**options) @@ -687,56 +607,37 @@ def upload_blob( # pylint: disable=too-many-locals return upload_page_blob(**options) return upload_append_blob(**options) - def _download_blob_options(self, offset=None, length=None, **kwargs): - # type: (Optional[int], Optional[int], **Any) -> Dict[str, Any] - if self.require_encryption and not self.key_encryption_key: - raise ValueError("Encryption required but no key was provided.") - if length is not None and offset is None: - raise ValueError("Offset value must not be None if length is set.") - if length is not None: - length = offset + length - 1 # Service actually uses an end-range inclusive index - - validate_content = kwargs.pop('validate_content', False) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'clients': self._client, - 'config': self._config, - 'start_range': offset, - 'end_range': length, - 'version_id': kwargs.pop('version_id', None), - 'validate_content': validate_content, - 'encryption_options': { - 'required': self.require_encryption, - 'key': self.key_encryption_key, - 'resolver': self.key_resolver_function}, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'cls': kwargs.pop('cls', None) or deserialize_blob_stream, - 'max_concurrency':kwargs.pop('max_concurrency', 1), - 'encoding': kwargs.pop('encoding', None), - 'timeout': kwargs.pop('timeout', None), - 'name': self.blob_name, - 'container': self.container_name} - options.update(kwargs) - return options + @overload + def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... @distributed_trace - def download_blob(self, offset=None, length=None, **kwargs): - # type: (Optional[int], Optional[int], **Any) -> StorageStreamDownloader + def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must be used to read all the content or readinto() must be used to download the blob into - a stream. + a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. :param int offset: Start of byte range to use for downloading a section of the blob. @@ -749,6 +650,7 @@ def download_blob(self, offset=None, length=None, **kwargs): value that, when present, specifies the version of the blob to download. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword bool validate_content: @@ -794,11 +696,23 @@ def download_blob(self, offset=None, length=None, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int max_concurrency: - The number of parallel connections with which to download. - :keyword str encoding: + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. + :keyword Optional[str] encoding: Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + A callback to track the progress of a long running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], None] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. - multiple calls to the Azure service and the timeout will apply to - each call individually. :returns: A streaming object (StorageStreamDownloader) @@ -813,87 +727,60 @@ def download_blob(self, offset=None, length=None, **kwargs): :dedent: 12 :caption: Download a blob.
""" - options = self._download_blob_options( + if self.require_encryption and not (self.key_encryption_key or self.key_resolver_function): + raise ValueError("Encryption required but no key was provided.") + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _download_blob_options( + blob_name=self.blob_name, + container_name=self.container_name, + version_id=get_version_id(self.version_id, kwargs), offset=offset, length=length, + encoding=encoding, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) return StorageStreamDownloader(**options) - def _quick_query_options(self, query_expression, - **kwargs): - # type: (str, **Any) -> Dict[str, Any] - delimiter = '\n' - input_format = kwargs.pop('blob_format', None) - if input_format: - try: - delimiter = input_format.lineterminator - except AttributeError: - try: - delimiter = input_format.delimiter - except AttributeError: - raise ValueError("The Type of blob_format can only be DelimitedTextDialect or DelimitedJsonDialect") - output_format = kwargs.pop('output_format', None) - if output_format: - try: - delimiter = output_format.lineterminator - except AttributeError: - try: - delimiter = output_format.delimiter - except AttributeError: - pass - else: - output_format = input_format - query_request = QueryRequest( - expression=query_expression, - input_serialization=serialize_query_format(input_format), - output_serialization=serialize_query_format(output_format) - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo( - encryption_key=cpk.key_value, - encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm - ) - options = { - 'query_request': query_request, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'snapshot': self.snapshot, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_headers_and_deserialized, - } - options.update(kwargs) - return options, delimiter - @distributed_trace - def query_blob(self, query_expression, **kwargs): - # type: (str, **Any) -> BlobQueryReader + def query_blob(self, query_expression: str, **kwargs: Any) -> BlobQueryReader: """Enables users to select/project on blob/or blob snapshot data by providing simple query expressions. This operations returns a BlobQueryReader, users need to use readall() or readinto() to get query data. :param str query_expression: - Required. a query statement. + Required. a query statement. For more details see + https://learn.microsoft.com/azure/storage/blobs/query-acceleration-sql-reference. :keyword Callable[~azure.storage.blob.BlobQueryError] on_error: A function to be called on any processing errors returned by the service. :keyword blob_format: Optional. Defines the serialization of the data currently stored in the blob. 
The default is to treat the blob data as CSV data formatted in the default dialect. This can be overridden with - a custom DelimitedTextDialect, or alternatively a DelimitedJsonDialect. + a custom DelimitedTextDialect, or DelimitedJsonDialect or "ParquetDialect" (passed as a string or enum). + These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string + + .. note:: + "ParquetDialect" is in preview, so some features may not work as intended. + :paramtype blob_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect + or ~azure.storage.blob.QuickQueryDialect or str :keyword output_format: Optional. Defines the output serialization for the data stream. By default the data will be returned - as it is represented in the blob. By providing an output format, the blob data will be reformatted - according to that profile. This value can be a DelimitedTextDialect or a DelimitedJsonDialect. - :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect, ~azure.storage.blob.DelimitedJsonDialect - or list[~azure.storage.blob.ArrowDialect] + as it is represented in the blob (Parquet formats default to DelimitedTextDialect). + By providing an output format, the blob data will be reformatted according to that profile. + This value can be a DelimitedTextDialect or a DelimitedJsonDialect or ArrowDialect. + These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string + :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect + or List[~azure.storage.blob.ArrowDialect] or ~azure.storage.blob.QuickQueryDialect or str :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -927,7 +814,11 @@ def query_blob(self, query_expression, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
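A sketch of the quick query call described here, assuming a CSV-formatted blob and the hypothetical client from earlier::

    from azure.storage.blob import DelimitedTextDialect

    dialect = DelimitedTextDialect(delimiter=",", quotechar='"',
                                   lineterminator="\n", has_header=True)
    reader = blob.query_blob(
        "SELECT * FROM BlobStorage",
        blob_format=dialect,
        output_format=dialect)
    print(reader.readall())  # BlobQueryReader; readall() returns the filtered data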
:returns: A streaming object (BlobQueryReader) :rtype: ~azure.storage.blob.BlobQueryReader @@ -943,7 +834,9 @@ def query_blob(self, query_expression, **kwargs): errors = kwargs.pop("on_error", None) error_cls = kwargs.pop("error_cls", BlobQueryError) encoding = kwargs.pop("encoding", None) - options, delimiter = self._quick_query_options(query_expression, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options, delimiter = _quick_query_options(self.snapshot, query_expression, **kwargs) try: headers, raw_response_body = self._client.blob.query(**options) except HttpResponseError as error: @@ -958,35 +851,8 @@ def query_blob(self, query_expression, **kwargs): response=raw_response_body, error_cls=error_cls) - @staticmethod - def _generic_delete_blob_options(delete_snapshots=False, **kwargs): - # type: (bool, **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if delete_snapshots: - delete_snapshots = DeleteSnapshotsOptionType(delete_snapshots) - options = { - 'timeout': kwargs.pop('timeout', None), - 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs - 'delete_snapshots': delete_snapshots or None, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions} - options.update(kwargs) - return options - - def _delete_blob_options(self, delete_snapshots=False, **kwargs): - # type: (bool, **Any) -> Dict[str, Any] - if self.snapshot and delete_snapshots: - raise ValueError("The delete_snapshots option cannot be used with a specific snapshot.") - options = self._generic_delete_blob_options(delete_snapshots, **kwargs) - options['snapshot'] = self.snapshot - options['version_id'] = kwargs.pop('version_id', None) - options['blob_delete_type'] = kwargs.pop('blob_delete_type', None) - return options - @distributed_trace - def delete_blob(self, delete_snapshots=False, **kwargs): - # type: (str, **Any) -> None + def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None: """Marks the specified blob for deletion. The blob is later deleted during garbage collection. @@ -1000,15 +866,16 @@ def delete_blob(self, delete_snapshots=False, **kwargs): Soft deleted blob is accessible through :func:`~ContainerClient.list_blobs()` specifying `include=['deleted']` option. Soft-deleted blob can be restored using :func:`undelete` operation. - :param str delete_snapshots: + :param Optional[str] delete_snapshots: Required if the blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. - "include": Deletes the blob along with all snapshots. - :keyword str version_id: + :keyword Optional[str] version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -1040,7 +907,11 @@ def delete_blob(self, delete_snapshots=False, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None .. 
admonition:: Example: @@ -1052,22 +923,33 @@ def delete_blob(self, delete_snapshots=False, **kwargs): :dedent: 12 :caption: Delete a blob. """ - options = self._delete_blob_options(delete_snapshots=delete_snapshots, **kwargs) + options = _delete_blob_options( + snapshot=self.snapshot, + version_id=get_version_id(self.version_id, kwargs), + delete_snapshots=delete_snapshots, + **kwargs) try: self._client.blob.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def undelete_blob(self, **kwargs): - # type: (**Any) -> None + def undelete_blob(self, **kwargs: Any) -> None: """Restores soft-deleted blobs or snapshots. Operation will only be successful if used within the specified number of days set in the delete retention policy. + If blob versioning is enabled, the base blob cannot be restored using this + method. Instead use :func:`start_copy_from_url` with the URL of the blob version + you wish to promote to the current version. + :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None .. admonition:: Example: @@ -1084,25 +966,34 @@ def undelete_blob(self, **kwargs): except HttpResponseError as error: process_storage_error(error) - @distributed_trace() - def exists(self, **kwargs): - # type: (**Any) -> bool + @distributed_trace + def exists(self, **kwargs: Any) -> bool: """ Returns True if a blob exists with the defined parameters, and returns False otherwise. - :param str version_id: + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to check if it exists. - :param int timeout: - The timeout parameter is expressed in seconds. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: boolean + :rtype: bool """ + version_id = get_version_id(self.version_id, kwargs) try: self._client.blob.get_properties( snapshot=self.snapshot, + version_id=version_id, **kwargs) return True + # Encrypted with CPK + except ResourceExistsError: + return True except HttpResponseError as error: try: process_storage_error(error) @@ -1110,8 +1001,7 @@ def exists(self, **kwargs): return False @distributed_trace - def get_blob_properties(self, **kwargs): - # type: (**Any) -> BlobProperties + def get_blob_properties(self, **kwargs: Any) -> BlobProperties: """Returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -1124,6 +1014,7 @@ def get_blob_properties(self, **kwargs): value that, when present, specifies the version of the blob to get properties. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword ~datetime.datetime if_modified_since: @@ -1155,7 +1046,11 @@ def get_blob_properties(self, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. 
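Taken together, a sketch of the existence check, soft delete, and restore described above (hypothetical client; delete retention assumed enabled on the account)::

    if blob.exists():
        blob.delete_blob(delete_snapshots="include")  # soft-deletes blob and snapshots

    # Within the retention window the blob can be brought back.
    blob.undelete_blob()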
:keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: BlobProperties :rtype: ~azure.storage.blob.BlobProperties @@ -1171,6 +1066,7 @@ def get_blob_properties(self, **kwargs): # TODO: extract this out as _get_blob_properties_options access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: @@ -1182,49 +1078,25 @@ def get_blob_properties(self, **kwargs): cls_method = kwargs.pop('cls', None) if cls_method: kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) - blob_props = self._client.blob.get_properties( + blob_props = cast(BlobProperties, self._client.blob.get_properties( timeout=kwargs.pop('timeout', None), - version_id=kwargs.pop('version_id', None), + version_id=version_id, snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=kwargs.pop('cls', None) or deserialize_blob_properties, cpk_info=cpk_info, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) blob_props.name = self.blob_name if isinstance(blob_props, BlobProperties): blob_props.container = self.container_name blob_props.snapshot = self.snapshot - return blob_props # type: ignore - - def _set_http_headers_options(self, content_settings=None, **kwargs): - # type: (Optional[ContentSettings], **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - options = { - 'timeout': kwargs.pop('timeout', None), - 'blob_http_headers': blob_headers, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options + return blob_props @distributed_trace - def set_http_headers(self, content_settings=None, **kwargs): - # type: (Optional[ContentSettings], **Any) -> None + def set_http_headers(self, content_settings: Optional["ContentSettings"] = None, **kwargs: Any) -> Dict[str, Any]: """Sets system properties on the blob. If one property is set for the content_settings, all properties will be overridden. @@ -1260,45 +1132,25 @@ def set_http_headers(self, content_settings=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
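A sketch pairing the property read above with the header update described next; set_http_headers overwrites every content setting, so values to keep must be carried forward (hypothetical client)::

    from azure.storage.blob import ContentSettings

    props = blob.get_blob_properties()
    print(props.size, props.blob_type, props.content_settings.content_type)

    blob.set_http_headers(ContentSettings(
        content_type="text/csv",
        cache_control="max-age=3600"))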
:returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = self._set_http_headers_options(content_settings=content_settings, **kwargs) + options = _set_http_headers_options(content_settings=content_settings, **kwargs) try: - return self._client.blob.set_http_headers(**options) # type: ignore + return cast(Dict[str, Any], self._client.blob.set_http_headers(**options)) except HttpResponseError as error: process_storage_error(error) - def _set_blob_metadata_options(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - @distributed_trace - def set_blob_metadata(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + def set_blob_metadata( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets user-defined metadata for the blob as one or more name-value pairs. :param metadata: @@ -1347,83 +1199,114 @@ def set_blob_metadata(self, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Union[str, datetime]] """ - options = self._set_blob_metadata_options(metadata=metadata, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _set_blob_metadata_options(metadata=metadata, **kwargs) try: - return self._client.blob.set_metadata(**options) # type: ignore + return cast(Dict[str, Union[str, datetime]], self._client.blob.set_metadata(**options)) except HttpResponseError as error: process_storage_error(error) - def _create_page_blob_options( # type: ignore - self, size, # type: int - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) + @distributed_trace + def set_immutability_policy( + self, immutability_policy: "ImmutabilityPolicy", + **kwargs: Any + ) -> Dict[str, str]: + """The Set Immutability Policy operation sets the immutability policy on the blob. - sequence_number = kwargs.pop('sequence_number', None) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. - if premium_page_blob_tier: - try: - headers['x-ms-access-tier'] = premium_page_blob_tier.value # type: ignore - except AttributeError: - headers['x-ms-access-tier'] = premium_page_blob_tier # type: ignore - - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'content_length': 0, - 'blob_content_length': size, - 'blob_sequence_number': sequence_number, - 'blob_http_headers': blob_headers, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'blob_tags_string': blob_tags_string, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options + :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to check if it exists. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: Key value pairs of blob tags. 
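A sketch of the immutability and legal-hold calls introduced here, assuming an account and container with version-level immutability enabled (a prerequisite for these APIs)::

    import datetime
    from azure.storage.blob import ImmutabilityPolicy

    policy = ImmutabilityPolicy(
        expiry_time=datetime.datetime.utcnow() + datetime.timedelta(days=7),
        policy_mode="Unlocked")
    blob.set_immutability_policy(policy)
    blob.set_legal_hold(True)

    # Both can be lifted again while the policy mode is still Unlocked.
    blob.set_legal_hold(False)
    blob.delete_immutability_policy()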
+ :rtype: Dict[str, str] + """ + + version_id = get_version_id(self.version_id, kwargs) + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + return cast(Dict[str, str], self._client.blob.set_immutability_policy( + cls=return_response_headers, version_id=version_id, **kwargs)) @distributed_trace - def create_page_blob( # type: ignore - self, size, # type: int - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def delete_immutability_policy(self, **kwargs: Any) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to check if it exists. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: Key value pairs of blob tags. + :rtype: Dict[str, str] + """ + + version_id = get_version_id(self.version_id, kwargs) + self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs) + + @distributed_trace + def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: + """The Set Legal Hold operation sets a legal hold on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param bool legal_hold: + Specified if a legal hold should be set on the blob. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to check if it exists. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: Key value pairs of blob tags. + :rtype: Dict[str, Union[str, datetime, bool]] + """ + + version_id = get_version_id(self.version_id, kwargs) + return cast(Dict[str, Union[str, datetime, bool]], self._client.blob.set_legal_hold( + legal_hold, version_id=version_id, cls=return_response_headers, **kwargs)) + + @distributed_trace + def create_page_blob( + self, size: int, + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Creates a new Page Blob of the specified size. :param int size: @@ -1444,7 +1327,7 @@ def create_page_blob( # type: ignore The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -1457,6 +1340,18 @@ def create_page_blob( # type: ignore Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1488,68 +1383,38 @@ def create_page_blob( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict[str, Any] """ - options = self._create_page_blob_options( - size, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_page_blob_options( + size=size, content_settings=content_settings, metadata=metadata, premium_page_blob_tier=premium_page_blob_tier, **kwargs) try: - return self._client.page_blob.create(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.create(**options)) except HttpResponseError as error: process_storage_error(error) - def _create_append_blob_options(self, content_settings=None, metadata=None, **kwargs): - # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key 
must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'content_length': 0, - 'blob_http_headers': blob_headers, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'blob_tags_string': blob_tags_string, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - @distributed_trace - def create_append_blob(self, content_settings=None, metadata=None, **kwargs): - # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] - """Creates a new Append Blob. + def create_append_blob( + self, content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: + """Creates a new Append Blob. This operation creates a new 0-length append blob. The content + of any existing blob is overwritten with the newly initialized append blob. To add content to + the append blob, call the :func:`append_block` or :func:`append_block_from_url` method. :param ~azure.storage.blob.ContentSettings content_settings: ContentSettings object used to set blob properties. Used to set content type, encoding, @@ -1562,7 +1427,7 @@ def create_append_blob(self, content_settings=None, metadata=None, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -1571,6 +1436,18 @@ def create_append_blob(self, content_settings=None, metadata=None, **kwargs): Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1602,48 +1479,32 @@ def create_append_blob(self, content_settings=None, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
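A sketch of initializing an append blob and then adding content, per the description above (hypothetical names)::

    append_blob = BlobClient.from_connection_string(
        "<connection-string>", container_name="logs", blob_name="app.log")
    append_blob.create_append_blob()            # zero-length blob; replaces any existing one
    append_blob.append_block(b"first line\n")   # content is added via append_block
    append_blob.append_block(b"second line\n")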
:rtype: dict[str, Any] """ - options = self._create_append_blob_options( + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_append_blob_options( content_settings=content_settings, metadata=metadata, **kwargs) try: - return self._client.append_blob.create(**options) # type: ignore + return cast(Dict[str, Union[str, datetime]], self._client.append_blob.create(**options)) except HttpResponseError as error: process_storage_error(error) - def _create_snapshot_options(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - @distributed_trace - def create_snapshot(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + def create_snapshot( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Creates a snapshot of the blob. A snapshot is a read-only version of a blob that's taken at a point in time. @@ -1697,7 +1558,11 @@ def create_snapshot(self, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). :rtype: dict[str, Any] @@ -1710,58 +1575,29 @@ def create_snapshot(self, metadata=None, **kwargs): :dedent: 8 :caption: Create a snapshot of the blob. 
""" - options = self._create_snapshot_options(metadata=metadata, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_snapshot_options(metadata=metadata, **kwargs) try: - return self._client.blob.create_snapshot(**options) # type: ignore + return cast(Dict[str, Any], self._client.blob.create_snapshot(**options)) except HttpResponseError as error: process_storage_error(error) - def _start_copy_from_url_options(self, source_url, metadata=None, incremental_copy=False, **kwargs): - # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Any] - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - if 'source_lease' in kwargs: - source_lease = kwargs.pop('source_lease') - try: - headers['x-ms-source-lease-id'] = source_lease.id # type: str - except AttributeError: - headers['x-ms-source-lease-id'] = source_lease - - tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None) - - if kwargs.get('requires_sync'): - headers['x-ms-requires-sync'] = str(kwargs.pop('requires_sync')) - - timeout = kwargs.pop('timeout', None) - dest_mod_conditions = get_modify_conditions(kwargs) - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'copy_source': source_url, - 'seal_blob': kwargs.pop('seal_destination_blob', None), - 'timeout': timeout, - 'modified_access_conditions': dest_mod_conditions, - 'blob_tags_string': blob_tags_string, - 'headers': headers, - 'cls': return_response_headers, - } - if not incremental_copy: - source_mod_conditions = get_source_conditions(kwargs) - dest_access_conditions = get_access_conditions(kwargs.pop('destination_lease', None)) - options['source_modified_access_conditions'] = source_mod_conditions - options['lease_access_conditions'] = dest_access_conditions - options['tier'] = tier.value if tier else None - options.update(kwargs) - return options - @distributed_trace - def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs): - # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Union[str, datetime]] - """Copies a blob asynchronously. - - This operation returns a copy operation - object that can be used to wait on the completion of the operation, - as well as check status or abort the copy operation. + def start_copy_from_url( + self, source_url: str, + metadata: Optional[Dict[str, str]] = None, + incremental_copy: bool = False, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: + """Copies a blob from the given URL. + + This operation returns a dictionary containing `copy_status` and `copy_id`, + which can be used to check the status of or abort the copy operation. + `copy_status` will be 'success' if the copy completed synchronously or + 'pending' if the copy has been started asynchronously. For asynchronous copies, + the status can be checked by polling the :func:`get_blob_properties` method and + checking the copy status. Set `requires_sync` to True to force the copy to be synchronous. The Blob service copies blobs on a best-effort basis. The source blob for a copy operation may be a block blob, an append blob, @@ -1784,10 +1620,6 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, end of the copy operation, the destination blob will have the same committed block count as the source. 
- For all blob types, you can call status() on the returned polling object - to check the status of the copy operation, or wait() to block until the - operation is complete. The final blob will be committed when the copy completes. - :param str source_url: A URL of up to 2 KB in length that specifies a file or blob. The value should be URL-encoded as it would appear in a request URI. @@ -1818,11 +1650,26 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_). + + The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob. + This option is only available when `incremental_copy=False` and `requires_sync=True`. .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: dict(str, str) or Literal["COPY"] + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime source_if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1871,7 +1718,11 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, the lease ID given matches the active lease ID of the source blob. :paramtype source_lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1888,8 +1739,23 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, :keyword bool requires_sync: Enforces that the service will not return a response until the copy is complete. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. This option is only available when `incremental_copy` is + set to False and `requires_sync` is set to True. + + .. versionadded:: 12.9.0 + + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption + scope can be created using the Management API and referenced here by name. 
If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.10.0 + :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). - :rtype: dict[str, str or ~datetime.datetime] + :rtype: dict[str, Union[str, ~datetime.datetime]] .. admonition:: Example: @@ -1900,38 +1766,23 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, :dedent: 12 :caption: Copy a blob from a URL. """ - options = self._start_copy_from_url_options( - source_url=self._encode_source_url(source_url), + options = _start_copy_from_url_options( + source_url=source_url, metadata=metadata, incremental_copy=incremental_copy, **kwargs) try: if incremental_copy: - return self._client.page_blob.copy_incremental(**options) - return self._client.blob.start_copy_from_url(**options) + return cast(Dict[str, Union[str, datetime]], self._client.page_blob.copy_incremental(**options)) + return cast(Dict[str, Union[str, datetime]], self._client.blob.start_copy_from_url(**options)) except HttpResponseError as error: process_storage_error(error) - def _abort_copy_options(self, copy_id, **kwargs): - # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - try: - copy_id = copy_id.copy.id - except AttributeError: - try: - copy_id = copy_id['copy_id'] - except TypeError: - pass - options = { - 'copy_id': copy_id, - 'lease_access_conditions': access_conditions, - 'timeout': kwargs.pop('timeout', None)} - options.update(kwargs) - return options - @distributed_trace - def abort_copy(self, copy_id, **kwargs): - # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> None + def abort_copy( + self, copy_id: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> None: """Abort an ongoing copy operation. This will leave a destination blob with zero length and full metadata. @@ -1952,15 +1803,14 @@ def abort_copy(self, copy_id, **kwargs): :dedent: 12 :caption: Abort copying a blob from URL. """ - options = self._abort_copy_options(copy_id, **kwargs) + options = _abort_copy_options(copy_id, **kwargs) try: self._client.blob.abort_copy_from_url(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): - # type: (int, Optional[str], **Any) -> BlobLeaseClient + def acquire_lease(self, lease_duration: int =-1, lease_id: Optional[str] = None, **kwargs: Any) -> BlobLeaseClient: """Requests a new lease. If the blob does not have an active lease, the Blob @@ -1999,7 +1849,11 @@ def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A BlobLeaseClient object. :rtype: ~azure.storage.blob.BlobLeaseClient @@ -2012,13 +1866,12 @@ def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): :dedent: 8 :caption: Acquiring a lease on a blob. 
""" - lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + lease = BlobLeaseClient(self, lease_id=lease_id) lease.acquire(lease_duration=lease_duration, **kwargs) return lease @distributed_trace - def set_standard_blob_tier(self, standard_blob_tier, **kwargs): - # type: (Union[str, StandardBlobTier], Any) -> None + def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: """This operation sets the tier on a block blob. A block blob's tier determines Hot/Cool/Archive storage type. @@ -2039,14 +1892,20 @@ def set_standard_blob_tier(self, standard_blob_tier, **kwargs): value that, when present, specifies the version of the blob to download. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword str if_tags_match_condition: Specify a SQL where clause on blob tags to operate only on blob with a matching value. eg. ``\"\\\"tagname\\\"='my tag'\"`` .. versionadded:: 12.4.0 + :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -2055,6 +1914,7 @@ def set_standard_blob_tier(self, standard_blob_tier, **kwargs): """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) if standard_blob_tier is None: raise ValueError("A StandardBlobTier must be specified") if self.snapshot and kwargs.get('version_id'): @@ -2066,69 +1926,25 @@ def set_standard_blob_tier(self, standard_blob_tier, **kwargs): timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, lease_access_conditions=access_conditions, + version_id=version_id, **kwargs) except HttpResponseError as error: process_storage_error(error) - def _stage_block_options( - self, block_id, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - block_id = encode_base64(str(block_id)) - if isinstance(data, six.text_type): - data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - if length is None: - length = get_length(data) - if length is None: - length, data = read_length(data) - if isinstance(data, bytes): - data = data[:length] - - validate_content = kwargs.pop('validate_content', False) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'block_id': block_id, - 'content_length': length, - 'body': data, - 'transactional_content_md5': None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - } - options.update(kwargs) - return options - @distributed_trace def stage_block( - self, block_id, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> Dict[str, Any] + self, block_id: str, + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob. :param str block_id: A string value that identifies the block. The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage @@ -2159,72 +1975,37 @@ def stage_block( .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob property dict. 
:rtype: dict[str, Any] """ - options = self._stage_block_options( - block_id, - data, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _stage_block_options( + block_id=block_id, + data=data, length=length, **kwargs) try: - return self._client.block_blob.stage_block(**options) + return cast(Dict[str, Any], self._client.block_blob.stage_block(**options)) except HttpResponseError as error: process_storage_error(error) - def _stage_block_from_url_options( - self, block_id, # type: str - source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - source_content_md5=None, # type: Optional[Union[bytes, bytearray]] - **kwargs - ): - # type: (...) -> Dict[str, Any] - if source_length is not None and source_offset is None: - raise ValueError("Source offset value must not be None if length is set.") - if source_length is not None: - source_length = source_offset + source_length - 1 - block_id = encode_base64(str(block_id)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - range_header = None - if source_offset is not None: - range_header, _ = validate_and_format_range_headers(source_offset, source_length) - - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'block_id': block_id, - 'content_length': 0, - 'source_url': source_url, - 'source_range': range_header, - 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - } - options.update(kwargs) - return options - @distributed_trace def stage_block_from_url( - self, block_id, # type: str - source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - source_content_md5=None, # type: Optional[Union[bytes, bytearray]] - **kwargs - ): - # type: (...) -> Dict[str, Any] + self, block_id: str, + source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + source_content_md5: Optional[Union[bytes, bytearray]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob where the contents are read from a URL. @@ -2257,35 +2038,36 @@ def stage_block_from_url( .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :returns: Blob property dict. 
:rtype: dict[str, Any] """ - options = self._stage_block_from_url_options( - block_id, - source_url=self._encode_source_url(source_url), + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _stage_block_from_url_options( + block_id=block_id, + source_url=source_url, source_offset=source_offset, source_length=source_length, source_content_md5=source_content_md5, **kwargs) try: - return self._client.block_blob.stage_block_from_url(**options) + return cast(Dict[str, Any], self._client.block_blob.stage_block_from_url(**options)) except HttpResponseError as error: process_storage_error(error) - def _get_block_list_result(self, blocks): - # type: (BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]] - committed = [] # type: List - uncommitted = [] # type: List - if blocks.committed_blocks: - committed = [BlobBlock._from_generated(b) for b in blocks.committed_blocks] # pylint: disable=protected-access - if blocks.uncommitted_blocks: - uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access - return committed, uncommitted - @distributed_trace - def get_block_list(self, block_list_type="committed", **kwargs): - # type: (Optional[str], **Any) -> Tuple[List[BlobBlock], List[BlobBlock]] + def get_block_list( + self, block_list_type: str = "committed", + **kwargs: Any + ) -> Tuple[List[BlobBlock], List[BlobBlock]]: """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. @@ -2303,9 +2085,13 @@ def get_block_list(self, block_list_type="committed", **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A tuple of two lists - committed and uncommitted blocks - :rtype: tuple(list(~azure.storage.blob.BlobBlock), list(~azure.storage.blob.BlobBlock)) + :rtype: Tuple[List[BlobBlock], List[BlobBlock]] """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -2319,81 +2105,15 @@ def get_block_list(self, block_list_type="committed", **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - return self._get_block_list_result(blocks) - - def _commit_block_list_options( # type: ignore - self, block_list, # type: List[BlobBlock] - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) - for block in block_list: - try: - if block.state.value == 'committed': - block_lookup.committed.append(encode_base64(str(block.id))) - elif block.state.value == 'uncommitted': - block_lookup.uncommitted.append(encode_base64(str(block.id))) - else: - block_lookup.latest.append(encode_base64(str(block.id))) - except AttributeError: - block_lookup.latest.append(encode_base64(str(block))) - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - blob_headers = None - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - - validate_content = kwargs.pop('validate_content', False) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - tier = kwargs.pop('standard_blob_tier', None) - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'blocks': block_lookup, - 'blob_http_headers': blob_headers, - 'lease_access_conditions': access_conditions, - 'timeout': kwargs.pop('timeout', None), - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'tier': tier.value if tier else None, - 'blob_tags_string': blob_tags_string, - 'headers': headers - } - options.update(kwargs) - return options + return _get_block_list_result(blocks) @distributed_trace - def commit_block_list( # type: ignore - self, block_list, # type: List[BlobBlock] - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def commit_block_list( + self, block_list: List[BlobBlock], + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. @@ -2410,7 +2130,7 @@ def commit_block_list( # type: ignore The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. 
versionadded:: 12.4.0 @@ -2419,6 +2139,18 @@ def commit_block_list( # type: ignore Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword bool validate_content: If true, calculates an MD5 hash of the page content. The storage service checks the hash of the content that has arrived @@ -2465,23 +2197,30 @@ def commit_block_list( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._commit_block_list_options( - block_list, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _commit_block_list_options( + block_list=block_list, content_settings=content_settings, metadata=metadata, **kwargs) try: - return self._client.block_blob.commit_block_list(**options) # type: ignore + return cast(Dict[str, Any], self._client.block_blob.commit_block_list(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): - # type: (Union[str, PremiumPageBlobTier], **Any) -> None + def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None: """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. :param premium_page_blob_tier: @@ -2496,9 +2235,11 @@ def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. 
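Taken together, the block-blob methods updated in the hunks above compose into the usual staged-upload flow: stage_block uploads uncommitted blocks, get_block_list inspects them, and commit_block_list atomically commits the blob. A minimal sketch against the public azure.storage.blob package, whose API this vendored copy mirrors (the connection string, container, and blob names are placeholders):

    import os
    import uuid

    from azure.storage.blob import BlobBlock, BlobClient

    # Placeholder credentials and names; substitute your own.
    blob = BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],
        container_name="my-container",
        blob_name="my-block-blob",
    )

    block_ids = []
    for chunk in (b"hello ", b"world"):
        block_id = uuid.uuid4().hex  # IDs must be the same length for a given blob
        blob.stage_block(block_id=block_id, data=chunk)  # uploads an uncommitted block
        block_ids.append(block_id)

    # Both elements of the tuple are lists of BlobBlock.
    committed, uncommitted = blob.get_block_list("all")

    # Committing the list writes the blob; blocks not committed here are discarded
    # by the service after a retention window.
    blob.commit_block_list([BlobBlock(block_id=bid) for bid in block_ids])
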
@@ -2519,23 +2260,8 @@ def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): except HttpResponseError as error: process_storage_error(error) - def _set_blob_tags_options(self, tags=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] - tags = serialize_blob_tags(tags) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - options = { - 'tags': tags, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def set_blob_tags(self, tags=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. Each call to this operation replaces all existing tags attached to the blob. To remove all tags from the blob, call this operation with no tags set. @@ -2548,7 +2274,7 @@ def set_blob_tags(self, tags=None, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) :type tags: dict(str, str) :keyword str version_id: The version id parameter is an opaque DateTime @@ -2568,39 +2294,29 @@ def set_blob_tags(self, tags=None, **kwargs): or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = self._set_blob_tags_options(tags=tags, **kwargs) + version_id = get_version_id(self.version_id, kwargs) + options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs) try: - return self._client.blob.set_tags(**options) + return cast(Dict[str, Any], self._client.blob.set_tags(**options)) except HttpResponseError as error: process_storage_error(error) - def _get_blob_tags_options(self, **kwargs): - # type: (**Any) -> Dict[str, str] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - options = { - 'version_id': kwargs.pop('version_id', None), - 'snapshot': self.snapshot, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_headers_and_deserialized} - return options - @distributed_trace - def get_blob_tags(self, **kwargs): - # type: (**Any) -> Dict[str, str] + def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]: """The Get Tags operation enables users to get tags on a blob or specific blob version, or snapshot. .. versionadded:: 12.4.0 This operation was introduced in API version '2019-12-12'. 
-        :keyword str version_id:
+        :keyword Optional[str] version_id:
             The version id parameter is an opaque DateTime
             value that, when present, specifies the version of the blob to add tags to.
         :keyword str if_tags_match_condition:
             Specify a SQL where clause on blob tags to operate only on blob with a matching value.
             eg. ``\"\\\"tagname\\\"='my tag'\"``
         :keyword lease:
             Required if the blob has an active lease. Value can be a BlobLeaseClient object
             or the lease ID as a string.
         :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
         :returns: Key value pairs of blob tags.
         :rtype: Dict[str, str]
         """
-        options = self._get_blob_tags_options(**kwargs)
+        version_id = get_version_id(self.version_id, kwargs)
+        options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs)
         try:
             _, tags = self._client.blob.get_tags(**options)
-            return parse_tags(tags)  # pylint: disable=protected-access
+            return cast(Dict[str, str], parse_tags(tags))
         except HttpResponseError as error:
             process_storage_error(error)
 
-    def _get_page_ranges_options(  # type: ignore
-            self, offset=None,  # type: Optional[int]
-            length=None,  # type: Optional[int]
-            previous_snapshot_diff=None,  # type: Optional[Union[str, Dict[str, Any]]]
-            **kwargs
-        ):
-        # type: (...) -> Dict[str, Any]
-        access_conditions = get_access_conditions(kwargs.pop('lease', None))
-        mod_conditions = get_modify_conditions(kwargs)
-        if length is not None and offset is None:
-            raise ValueError("Offset value must not be None if length is set.")
-        if length is not None:
-            length = offset + length - 1  # Reformat to an inclusive range index
-        page_range, _ = validate_and_format_range_headers(
-            offset, length, start_range_required=False, end_range_required=False, align_to_page=True
-        )
-        options = {
-            'snapshot': self.snapshot,
-            'lease_access_conditions': access_conditions,
-            'modified_access_conditions': mod_conditions,
-            'timeout': kwargs.pop('timeout', None),
-            'range': page_range}
-        if previous_snapshot_diff:
-            try:
-                options['prevsnapshot'] = previous_snapshot_diff.snapshot  # type: ignore
-            except AttributeError:
-                try:
-                    options['prevsnapshot'] = previous_snapshot_diff['snapshot']  # type: ignore
-                except TypeError:
-                    options['prevsnapshot'] = previous_snapshot_diff
-        options.update(kwargs)
-        return options
-
     @distributed_trace
-    def get_page_ranges(  # type: ignore
-            self, offset=None,  # type: Optional[int]
-            length=None,  # type: Optional[int]
-            previous_snapshot_diff=None,  # type: Optional[Union[str, Dict[str, Any]]]
-            **kwargs
-        ):
-        # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
-        """Returns the list of valid page ranges for a Page Blob or snapshot
+    def get_page_ranges(
+        self, offset: Optional[int] = None,
+        length: Optional[int] = None,
+        previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None,
+        **kwargs: Any
+    ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
+        """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot
         of a page blob.
 
         :param int offset:
@@ -2712,13 +2399,23 @@ def get_page_ranges(  # type: ignore
 
             .. versionadded:: 12.4.0
 
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
         :returns:
             A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
             The first element contains filled page ranges, the second element contains cleared page ranges.
         :rtype: tuple(list(dict(str, str)), list(dict(str, str)))
         """
+        warnings.warn(
+            "get_page_ranges is deprecated, use list_page_ranges instead",
+            DeprecationWarning
+        )
+
-        options = self._get_page_ranges_options(
+        options = _get_page_ranges_options(
+            snapshot=self.snapshot,
             offset=offset,
             length=length,
             previous_snapshot_diff=previous_snapshot_diff,
@@ -2732,14 +2429,104 @@ def get_page_ranges(  # type: ignore
             process_storage_error(error)
         return get_page_ranges_result(ranges)
 
+    @distributed_trace
+    def list_page_ranges(
+        self,
+        *,
+        offset: Optional[int] = None,
+        length: Optional[int] = None,
+        previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None,
+        **kwargs: Any
+    ) -> ItemPaged[PageRange]:
+        """Returns the list of valid page ranges for a Page Blob or snapshot
+        of a page blob. If `previous_snapshot` is specified, the result will be
+        a diff of changes between the target blob and the previous snapshot.
+
+        :keyword int offset:
+            Start of byte range to use for getting valid page ranges.
+            If no length is given, all bytes after the offset will be searched.
+            Pages must be aligned with 512-byte boundaries, the start offset
+            must be a modulus of 512 and the length must be a modulus of
+            512.
+        :keyword int length:
+            Number of bytes to use for getting valid page ranges.
+            If length is given, offset must be provided.
+            This range will return valid page ranges from the offset start up to
+            the specified length.
+            Pages must be aligned with 512-byte boundaries, the start offset
+            must be a modulus of 512 and the length must be a modulus of
+            512.
+        :keyword previous_snapshot:
+            A snapshot value that specifies that the response will contain only pages that were changed
+            between target blob and previous snapshot. Changed pages include both updated and cleared
+            pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot`
+            is the older of the two.
+        :paramtype previous_snapshot: str or Dict[str, Any]
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword int results_per_page:
+            The maximum number of page ranges to retrieve per API call.
+        :keyword int timeout:
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
+        :returns: An iterable (auto-paging) of PageRange.
+        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.PageRange]
+        """
+        results_per_page = kwargs.pop('results_per_page', None)
+        options = _get_page_ranges_options(
+            snapshot=self.snapshot,
+            offset=offset,
+            length=length,
+            previous_snapshot_diff=previous_snapshot,
+            **kwargs)
+
+        if previous_snapshot:
+            command = partial(
+                self._client.page_blob.get_page_ranges_diff,
+                **options)
+        else:
+            command = partial(
+                self._client.page_blob.get_page_ranges,
+                **options)
+        return ItemPaged(
+            command, results_per_page=results_per_page,
+            page_iterator_class=PageRangePaged)
+
     @distributed_trace
     def get_page_range_diff_for_managed_disk(
-            self, previous_snapshot_url,  # type: str
-            offset=None,  # type: Optional[int]
-            length=None,  # type: Optional[int]
-            **kwargs
-        ):
-        # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
+        self, previous_snapshot_url: str,
+        offset: Optional[int] = None,
+        length: Optional[int] = None,
+        **kwargs: Any
+    ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
         """Returns the list of valid page ranges for a managed disk or snapshot.
 
         .. note::
            This operation is only available for managed disk accounts.
 
         .. versionadded:: 12.2.0
@@ -2748,7 +2535,7 @@ def get_page_range_diff_for_managed_disk(
            This operation was introduced in API version '2019-07-07'.
 
-        :param previous_snapshot_url:
+        :param str previous_snapshot_url:
            Specifies the URL of a previous snapshot of the managed disk.
            The response will only contain pages that were changed between the target
            blob and its previous snapshot.
@@ -2788,13 +2575,18 @@ def get_page_range_diff_for_managed_disk(
         :keyword ~azure.core.MatchConditions match_condition:
            The match condition to use upon the etag.
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
         :returns:
            A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
           The first element contains filled page ranges, the second element contains cleared page ranges.
         :rtype: tuple(list(dict(str, str)), list(dict(str, str)))
         """
-        options = self._get_page_ranges_options(
+        options = _get_page_ranges_options(
+            snapshot=self.snapshot,
             offset=offset,
             length=length,
             prev_snapshot_url=previous_snapshot_url,
             **kwargs)
@@ -2805,25 +2597,12 @@ def get_page_range_diff_for_managed_disk(
             process_storage_error(error)
         return get_page_ranges_result(ranges)
 
-    def _set_sequence_number_options(self, sequence_number_action, sequence_number=None, **kwargs):
-        # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Any]
-        access_conditions = get_access_conditions(kwargs.pop('lease', None))
-        mod_conditions = get_modify_conditions(kwargs)
-        if sequence_number_action is None:
-            raise ValueError("A sequence number action must be specified")
-        options = {
-            'sequence_number_action': sequence_number_action,
-            'timeout': kwargs.pop('timeout', None),
-            'blob_sequence_number': sequence_number,
-            'lease_access_conditions': access_conditions,
-            'modified_access_conditions': mod_conditions,
-            'cls': return_response_headers}
-        options.update(kwargs)
-        return options
-
     @distributed_trace
-    def set_sequence_number(self, sequence_number_action, sequence_number=None, **kwargs):
-        # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Union[str, datetime]]
+    def set_sequence_number(
+        self, sequence_number_action: Union[str, "SequenceNumberAction"],
+        sequence_number: Optional[str] = None,
+        **kwargs: Any
+    ) -> Dict[str, Union[str, datetime]]:
         """Sets the blob sequence number.
 
         :param str sequence_number_action:
@@ -2861,44 +2640,22 @@ def set_sequence_number(self, sequence_number_action, sequence_number=None, **kw
 
            .. versionadded:: 12.4.0
 
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
         :returns: Blob-updated property dict (Etag and last modified).
:rtype: dict(str, Any) """ - options = self._set_sequence_number_options( - sequence_number_action, sequence_number=sequence_number, **kwargs) + options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs) try: - return self._client.page_blob.update_sequence_number(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.update_sequence_number(**options)) except HttpResponseError as error: process_storage_error(error) - def _resize_blob_options(self, size, **kwargs): - # type: (int, **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if size is None: - raise ValueError("A content length must be specified for a Page Blob.") - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'blob_content_length': size, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def resize_blob(self, size, **kwargs): - # type: (int, **Any) -> Dict[str, Union[str, datetime]] + def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: """Resizes a page blob to the specified size. If the specified value is less than the current size of the blob, @@ -2939,74 +2696,29 @@ def resize_blob(self, size, **kwargs): blob and number of allowed IOPS. This is only applicable to page blobs on premium storage accounts. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._resize_blob_options(size, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _resize_blob_options(size=size, **kwargs) try: - return self._client.page_blob.resize(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.resize(**options)) except HttpResponseError as error: process_storage_error(error) - def _upload_page_options( # type: ignore - self, page, # type: bytes - offset, # type: int - length, # type: int - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if isinstance(page, six.text_type): - page = page.encode(kwargs.pop('encoding', 'UTF-8')) - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - - if offset is None or offset % 512 != 0: - raise ValueError("offset must be an integer that aligns with 512 page size") - if length is None or length % 512 != 0: - raise ValueError("length must be an integer that aligns with 512 page size") - end_range = offset + length - 1 # Reformat to an inclusive range index - content_range = 'bytes={0}-{1}'.format(offset, end_range) # type: ignore - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - seq_conditions = SequenceNumberAccessConditions( - if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), - if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), - if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) - ) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - validate_content = kwargs.pop('validate_content', False) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'body': page[:length], - 'content_length': length, - 'transactional_content_md5': None, - 'timeout': kwargs.pop('timeout', None), - 'range': content_range, - 'lease_access_conditions': access_conditions, - 'sequence_number_access_conditions': seq_conditions, - 'modified_access_conditions': mod_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def upload_page( # type: ignore - self, page, # type: bytes - offset, # type: int - length, # type: int - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def upload_page( + self, page: bytes, + offset: int, + length: int, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """The Upload Pages operation writes a range of pages to a page blob. :param bytes page: @@ -3080,87 +2792,36 @@ def upload_page( # type: ignore :keyword str encoding: Defaults to UTF-8. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
:rtype: dict(str, Any) """ - options = self._upload_page_options( + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_page_options( page=page, offset=offset, length=length, **kwargs) try: - return self._client.page_blob.upload_pages(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.upload_pages(**options)) except HttpResponseError as error: process_storage_error(error) - def _upload_pages_from_url_options( # type: ignore - self, source_url, # type: str - offset, # type: int - length, # type: int - source_offset, # type: int - **kwargs - ): - # type: (...) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - - # TODO: extract the code to a method format_range - if offset is None or offset % 512 != 0: - raise ValueError("offset must be an integer that aligns with 512 page size") - if length is None or length % 512 != 0: - raise ValueError("length must be an integer that aligns with 512 page size") - if source_offset is None or offset % 512 != 0: - raise ValueError("source_offset must be an integer that aligns with 512 page size") - - # Format range - end_range = offset + length - 1 - destination_range = 'bytes={0}-{1}'.format(offset, end_range) - source_range = 'bytes={0}-{1}'.format(source_offset, source_offset + length - 1) # should subtract 1 here? - - seq_conditions = SequenceNumberAccessConditions( - if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), - if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), - if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - source_mod_conditions = get_source_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - source_content_md5 = kwargs.pop('source_content_md5', None) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'source_url': source_url, - 'content_length': 0, - 'source_range': source_range, - 'range': destination_range, - 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'sequence_number_access_conditions': seq_conditions, - 'modified_access_conditions': mod_conditions, - 'source_modified_access_conditions': source_mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def upload_pages_from_url(self, source_url, # type: str - offset, # type: int - length, # type: int - source_offset, # type: int - **kwargs - ): - # type: (...) 
-> Dict[str, Any] + def upload_pages_from_url( + self, source_url: str, + offset: int, + length: int, + source_offset: int, + **kwargs: Any + ) -> Dict[str, Any]: """ The Upload Pages operation writes a range of pages to a page blob where the contents are read from a URL. @@ -3250,61 +2911,35 @@ def upload_pages_from_url(self, source_url, # type: str .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Response after uploading pages from specified URL. + :rtype: Dict[str, Any] """ - options = self._upload_pages_from_url_options( - source_url=self._encode_source_url(source_url), + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_pages_from_url_options( + source_url=source_url, offset=offset, length=length, source_offset=source_offset, **kwargs ) try: - return self._client.page_blob.upload_pages_from_url(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.upload_pages_from_url(**options)) except HttpResponseError as error: process_storage_error(error) - def _clear_page_options(self, offset, length, **kwargs): - # type: (int, int, **Any) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - seq_conditions = SequenceNumberAccessConditions( - if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), - if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), - if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) - ) - mod_conditions = get_modify_conditions(kwargs) - if offset is None or offset % 512 != 0: - raise ValueError("offset must be an integer that aligns with 512 page size") - if length is None or length % 512 != 0: - raise ValueError("length must be an integer that aligns with 512 page size") - end_range = length + offset - 1 # Reformat to an inclusive range index - content_range = 'bytes={0}-{1}'.format(offset, end_range) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'content_length': 0, - 'timeout': kwargs.pop('timeout', None), - 'range': content_range, - 'lease_access_conditions': access_conditions, - 'sequence_number_access_conditions': seq_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def clear_page(self, offset, length, **kwargs): - # type: (int, int, 
**Any) -> Dict[str, Union[str, datetime]] + def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: """Clears a range of pages. :param int offset: @@ -3359,77 +2994,34 @@ def clear_page(self, offset, length, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._clear_page_options(offset, length, **kwargs) + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _clear_page_options( + offset=offset, + length=length, + **kwargs + ) try: - return self._client.page_blob.clear_pages(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.clear_pages(**options)) except HttpResponseError as error: process_storage_error(error) - def _append_block_options( # type: ignore - self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - - if isinstance(data, six.text_type): - data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore - if length is None: - length = get_length(data) - if length is None: - length, data = read_length(data) - if length == 0: - return {} - if isinstance(data, bytes): - data = data[:length] - - appendpos_condition = kwargs.pop('appendpos_condition', None) - maxsize_condition = kwargs.pop('maxsize_condition', None) - validate_content = kwargs.pop('validate_content', False) - append_conditions = None - if maxsize_condition or appendpos_condition is not None: - append_conditions = AppendPositionAccessConditions( - max_size=maxsize_condition, - append_position=appendpos_condition - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'body': data, - 'content_length': length, - 'timeout': kwargs.pop('timeout', None), - 'transactional_content_md5': None, - 'lease_access_conditions': access_conditions, - 'append_position_access_conditions': append_conditions, - 'modified_access_conditions': mod_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def append_block( # type: ignore - self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] - 
length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime, int]] + def append_block( + self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, int]]: """Commits a new block of data to the end of the existing append blob. :param data: @@ -3499,85 +3091,35 @@ def append_block( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). :rtype: dict(str, Any) """ - options = self._append_block_options( - data, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _append_block_options( + data=data, length=length, **kwargs ) try: - return self._client.append_blob.append_block(**options) # type: ignore + return cast(Dict[str, Any], self._client.append_blob.append_block(**options)) except HttpResponseError as error: process_storage_error(error) - def _append_block_from_url_options( # type: ignore - self, copy_source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - - # If end range is provided, start range must be provided - if source_length is not None and source_offset is None: - raise ValueError("source_offset should also be specified if source_length is specified") - # Format based on whether length is present - source_range = None - if source_length is not None: - end_range = source_offset + source_length - 1 - source_range = 'bytes={0}-{1}'.format(source_offset, end_range) - elif source_offset is not None: - source_range = "bytes={0}-".format(source_offset) - - appendpos_condition = kwargs.pop('appendpos_condition', None) - maxsize_condition = kwargs.pop('maxsize_condition', None) - source_content_md5 = kwargs.pop('source_content_md5', None) - append_conditions = None - if maxsize_condition or appendpos_condition is not None: - append_conditions = AppendPositionAccessConditions( - max_size=maxsize_condition, - append_position=appendpos_condition - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - source_mod_conditions = get_source_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'source_url': copy_source_url, - 'content_length': 0, - 'source_range': source_range, - 'source_content_md5': source_content_md5, - 'transactional_content_md5': None, - 
'lease_access_conditions': access_conditions, - 'append_position_access_conditions': append_conditions, - 'modified_access_conditions': mod_conditions, - 'source_modified_access_conditions': source_mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - 'timeout': kwargs.pop('timeout', None)} - options.update(kwargs) - return options - @distributed_trace - def append_block_from_url(self, copy_source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - **kwargs): - # type: (...) -> Dict[str, Union[str, datetime, int]] + def append_block_from_url( + self, copy_source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, int]]: """ Creates a new block to be committed as part of a blob, where the contents are read from a source url. @@ -3661,45 +3203,35 @@ def append_block_from_url(self, copy_source_url, # type: str .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Result after appending a new block. + :rtype: Dict[str, Union[str, datetime, int]] """ - options = self._append_block_from_url_options( - copy_source_url=self._encode_source_url(copy_source_url), + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _append_block_from_url_options( + copy_source_url=copy_source_url, source_offset=source_offset, source_length=source_length, **kwargs ) try: - return self._client.append_blob.append_block_from_url(**options) # type: ignore + return cast(Dict[str, Union[str, datetime, int]], + self._client.append_blob.append_block_from_url(**options)) except HttpResponseError as error: process_storage_error(error) - def _seal_append_blob_options(self, **kwargs): - # type: (...) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - - appendpos_condition = kwargs.pop('appendpos_condition', None) - append_conditions = None - if appendpos_condition is not None: - append_conditions = AppendPositionAccessConditions( - append_position=appendpos_condition - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - options = { - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'append_position_access_conditions': append_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def seal_append_blob(self, **kwargs): - # type: (...) 
-> Dict[str, Union[str, datetime, int]] + def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]: """The Seal operation seals the Append Blob to make it read-only. .. versionadded:: 12.4.0 @@ -3732,12 +3264,51 @@ def seal_append_blob(self, **kwargs): :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). :rtype: dict(str, Any) """ - options = self._seal_append_blob_options(**kwargs) + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + options = _seal_append_blob_options(**kwargs) try: - return self._client.append_blob.seal(**options) # type: ignore + return cast(Dict[str, Any], self._client.append_blob.seal(**options)) except HttpResponseError as error: process_storage_error(error) + + @distributed_trace + def _get_container_client(self) -> "ContainerClient": + """Get a client to interact with the blob's parent container. + + The container need not already exist. Defaults to current blob's credentials. + + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_container_client_from_blob_client] + :end-before: [END get_container_client_from_blob_client] + :language: python + :dedent: 8 + :caption: Get container client from blob object. + """ + from ._container_client import ContainerClient + if not isinstance(self._pipeline._transport, TransportWrapper): # pylint: disable = protected-access + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline + return ContainerClient( + f"{self.scheme}://{self.primary_hostname}", container_name=self.container_name, + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client_helpers.py new file mode 100644 index 000000000000..a04f0ea02525 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_client_helpers.py @@ -0,0 +1,1246 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from io import BytesIO +from typing import ( + Any, AnyStr, AsyncGenerator, AsyncIterable, cast, + Dict, IO, Iterable, List, Optional, Tuple, Union, + TYPE_CHECKING +) +from urllib.parse import quote, unquote, urlparse + +from ._deserialize import deserialize_blob_stream +from ._encryption import modify_user_agent_for_encryption, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION +from ._generated.models import ( + AppendPositionAccessConditions, + BlobHTTPHeaders, + BlockList, + BlockLookupList, + CpkInfo, + DeleteSnapshotsOptionType, + QueryRequest, + SequenceNumberAccessConditions +) +from ._models import ( + BlobBlock, + BlobProperties, + BlobType, + DelimitedJsonDialect, + DelimitedTextDialect, + PremiumPageBlobTier, + QuickQueryDialect +) +from ._serialize import ( + get_access_conditions, + get_cpk_scope_info, + get_modify_conditions, + get_source_conditions, + serialize_blob_tags_header, + serialize_blob_tags, + serialize_query_format +) +from ._shared import encode_base64 +from ._shared.base_client import parse_query +from ._shared.request_handlers import ( + add_metadata_headers, + get_length, + read_length, + validate_and_format_range_headers +) +from ._shared.response_handlers import return_headers_and_deserialized, return_response_headers +from ._shared.uploads import IterStreamer +from ._shared.uploads_async import AsyncIterStreamer +from ._upload_helpers import _any_conditions + +if TYPE_CHECKING: + from urllib.parse import ParseResult + from ._generated import AzureBlobStorage + from ._models import ContentSettings + from ._shared.models import StorageConfiguration + + +def _parse_url( + account_url: str, + container_name: str, + blob_name: str +) -> Tuple["ParseResult", Optional[str], Optional[str]]: + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError as exc: + raise ValueError("Account URL must be a string.") from exc + parsed_url = urlparse(account_url.rstrip('/')) + + if not (container_name and blob_name): + raise ValueError("Please specify a container name and blob name.") + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {account_url}") + + path_snapshot, sas_token = parse_query(parsed_url.query) + + return parsed_url, sas_token, path_snapshot + +def _format_url(container_name: Union[bytes, str], scheme: str, blob_name: str, query_str: str, hostname: str) -> str: + if isinstance(container_name, str): + container_name = container_name.encode('UTF-8') + return f"{scheme}://{hostname}/{quote(container_name)}/{quote(blob_name, safe='~/')}{query_str}" + +def _encode_source_url(source_url: str) -> str: + parsed_source_url = urlparse(source_url) + source_scheme = parsed_source_url.scheme + source_hostname = parsed_source_url.netloc.rstrip('/') + source_path = unquote(parsed_source_url.path) + source_query = parsed_source_url.query + result = [f"{source_scheme}://{source_hostname}{quote(source_path, safe='~/')}"] + if source_query: + result.append(source_query) + return '?'.join(result) + +def _upload_blob_options( # pylint:disable=too-many-statements + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], + blob_type: Union[str, BlobType], + length: Optional[int], + metadata: Optional[Dict[str, str]], + encryption_options: Dict[str, Any], + config: "StorageConfiguration", + sdk_moniker: str, + client: "AzureBlobStorage", + **kwargs: Any +) -> Dict[str, Any]: + encoding 
= kwargs.pop('encoding', 'UTF-8') + if isinstance(data, str): + data = data.encode(encoding) + if length is None: + length = get_length(data) + if isinstance(data, bytes): + data = data[:length] + + stream: Optional[Any] = None + if isinstance(data, bytes): + stream = BytesIO(data) + elif hasattr(data, 'read'): + stream = data + elif hasattr(data, '__iter__') and not isinstance(data, (list, tuple, set, dict)): + stream = IterStreamer(data, encoding=encoding) + elif hasattr(data, '__aiter__'): + stream = AsyncIterStreamer(cast(AsyncGenerator, data), encoding=encoding) + else: + raise TypeError(f"Unsupported data type: {type(data)}") + + validate_content = kwargs.pop('validate_content', False) + content_settings = kwargs.pop('content_settings', None) + overwrite = kwargs.pop('overwrite', False) + max_concurrency = kwargs.pop('max_concurrency', 1) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + kwargs['cpk_info'] = cpk_info + + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) + kwargs['modified_access_conditions'] = get_modify_conditions(kwargs) + kwargs['cpk_scope_info'] = get_cpk_scope_info(kwargs) + if content_settings: + kwargs['blob_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + kwargs['blob_tags_string'] = serialize_blob_tags_header(kwargs.pop('tags', None)) + kwargs['stream'] = stream + kwargs['length'] = length + kwargs['overwrite'] = overwrite + kwargs['headers'] = headers + kwargs['validate_content'] = validate_content + kwargs['blob_settings'] = config + kwargs['max_concurrency'] = max_concurrency + kwargs['encryption_options'] = encryption_options + # Add feature flag to user agent for encryption + if encryption_options['key']: + modify_user_agent_for_encryption( + config.user_agent_policy.user_agent, + sdk_moniker, + encryption_options['version'], + kwargs) + + if blob_type == BlobType.BlockBlob: + kwargs['client'] = client.block_blob + elif blob_type == BlobType.PageBlob: + if (encryption_options['version'] == '2.0' and + (encryption_options['required'] or encryption_options['key'] is not None)): + raise ValueError("Encryption version 2.0 does not currently support page blobs.") + kwargs['client'] = client.page_blob + elif blob_type == BlobType.AppendBlob: + if encryption_options['required'] or (encryption_options['key'] is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + kwargs['client'] = client.append_blob + else: + raise ValueError(f"Unsupported BlobType: {blob_type}") + return kwargs + +def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, Any]: + metadata = kwargs.pop('metadata', None) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + source_url = _encode_source_url(source_url=source_url) + tier = kwargs.pop('standard_blob_tier', None) + overwrite = kwargs.pop('overwrite', False) + content_settings = kwargs.pop('content_settings', None) + source_authorization = kwargs.pop('source_authorization', None) + if 
content_settings: + kwargs['blob_http_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=None, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'content_length': 0, + 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), + 'source_content_md5': kwargs.pop('source_content_md5', None), + 'copy_source': source_url, + 'modified_access_conditions': get_modify_conditions(kwargs), + 'blob_tags_string': serialize_blob_tags_header(kwargs.pop('tags', None)), + 'cls': return_response_headers, + 'lease_access_conditions': get_access_conditions(kwargs.pop('destination_lease', None)), + 'tier': tier.value if tier else None, + 'source_modified_access_conditions': get_source_conditions(kwargs), + 'cpk_info': cpk_info, + 'cpk_scope_info': get_cpk_scope_info(kwargs), + 'headers': headers, + } + options.update(kwargs) + if not overwrite and not _any_conditions(**options): + options['modified_access_conditions'].if_none_match = '*' + return options + +def _download_blob_options( + blob_name: str, + container_name: str, + version_id: Optional[str], + offset: Optional[int], + length: Optional[int], + encoding: Optional[str], + encryption_options: Dict[str, Any], + config: "StorageConfiguration", + sdk_moniker: str, + client: "AzureBlobStorage", + **kwargs +) -> Dict[str, Any]: + """Creates a dictionary containing the options for a download blob operation. + + :param str blob_name: + The name of the blob. + :param str container_name: + The name of the container. + :param Optional[str] version_id: + The version id parameter is a value that, when present, specifies the version of the blob to download. + :param Optional[int] offset: + Start of byte range to use for downloading a section of the blob. Must be set if length is provided. + :param Optional[int] length: + Number of bytes to read from the stream. This is optional, but should be supplied for optimal performance. + :param Optional[str] encoding: + Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :param Dict[str, Any] encryption_options: + The options for encryption, if enabled. + :param StorageConfiguration config: + The Storage configuration options. + :param str sdk_moniker: + The string representing the SDK package version. + :param AzureBlobStorage client: + The generated Blob Storage client. + :returns: A dictionary containing the download blob options. 
+ :rtype: Dict[str, Any] + """ + if length is not None: + if offset is None: + raise ValueError("Offset must be provided if length is provided.") + length = offset + length - 1 # Service actually uses an end-range inclusive index + + validate_content = kwargs.pop('validate_content', False) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + # Add feature flag to user agent for encryption + if encryption_options['key'] or encryption_options['resolver']: + modify_user_agent_for_encryption( + config.user_agent_policy.user_agent, + sdk_moniker, + encryption_options['version'], + kwargs) + + options = { + 'clients': client, + 'config': config, + 'start_range': offset, + 'end_range': length, + 'version_id': version_id, + 'validate_content': validate_content, + 'encryption_options': { + 'required': encryption_options['required'], + 'key': encryption_options['key'], + 'resolver': encryption_options['resolver']}, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'download_cls': kwargs.pop('cls', None) or deserialize_blob_stream, + 'max_concurrency':kwargs.pop('max_concurrency', 1), + 'encoding': encoding, + 'timeout': kwargs.pop('timeout', None), + 'name': blob_name, + 'container': container_name} + options.update(kwargs) + return options + +def _quick_query_options(snapshot: Optional[str], query_expression: str, **kwargs: Any ) -> Tuple[Dict[str, Any], str]: + delimiter = '\n' + input_format = kwargs.pop('blob_format', None) + if input_format == QuickQueryDialect.DelimitedJson: + input_format = DelimitedJsonDialect() + if input_format == QuickQueryDialect.DelimitedText: + input_format = DelimitedTextDialect() + input_parquet_format = input_format == "ParquetDialect" + if input_format and not input_parquet_format: + try: + delimiter = input_format.lineterminator + except AttributeError: + try: + delimiter = input_format.delimiter + except AttributeError as exc: + raise ValueError("The Type of blob_format can only be DelimitedTextDialect or " + "DelimitedJsonDialect or ParquetDialect") from exc + output_format = kwargs.pop('output_format', None) + if output_format == QuickQueryDialect.DelimitedJson: + output_format = DelimitedJsonDialect() + if output_format == QuickQueryDialect.DelimitedText: + output_format = DelimitedTextDialect() + if output_format: + if output_format == "ParquetDialect": + raise ValueError("ParquetDialect is invalid as an output format.") + try: + delimiter = output_format.lineterminator + except AttributeError: + try: + delimiter = output_format.delimiter + except AttributeError: + pass + else: + output_format = input_format if not input_parquet_format else None + query_request = QueryRequest( + expression=query_expression, + input_serialization=serialize_query_format(input_format), + output_serialization=serialize_query_format(output_format) + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) + options = { + 'query_request': query_request, + 'lease_access_conditions': access_conditions, + 
'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'snapshot': snapshot, + 'timeout': kwargs.pop('timeout', None), + 'cls': return_headers_and_deserialized, + } + options.update(kwargs) + return options, delimiter + +def _generic_delete_blob_options(delete_snapshots: Optional[str] = None, **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if delete_snapshots: + delete_snapshots = DeleteSnapshotsOptionType(delete_snapshots) + options = { + 'timeout': kwargs.pop('timeout', None), + 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs + 'delete_snapshots': delete_snapshots or None, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions} + options.update(kwargs) + return options + +def _delete_blob_options( + snapshot: Optional[str], + version_id: Optional[str], + delete_snapshots: Optional[str] = None, + **kwargs: Any +) -> Dict[str, Any]: + if snapshot and delete_snapshots: + raise ValueError("The delete_snapshots option cannot be used with a specific snapshot.") + options = _generic_delete_blob_options(delete_snapshots, **kwargs) + options['snapshot'] = snapshot + options['version_id'] = version_id + options['blob_delete_type'] = kwargs.pop('blob_delete_type', None) + return options + +def _set_http_headers_options(content_settings: Optional["ContentSettings"] = None, **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + options = { + 'timeout': kwargs.pop('timeout', None), + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _set_blob_metadata_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any): + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + +def _create_page_blob_options( + size: int, + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + **kwargs: Any +) -> Dict[str, Any]: + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = 
get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + sequence_number = kwargs.pop('sequence_number', None) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + tier = None + if premium_page_blob_tier: + try: + tier = premium_page_blob_tier.value # type: ignore + except AttributeError: + tier = premium_page_blob_tier # type: ignore + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_content_length': size, + 'blob_sequence_number': sequence_number, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + "tier": tier, + 'headers': headers} + options.update(kwargs) + return options + +def _create_append_blob_options( + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any +) -> Dict[str, Any]: + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + 
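All of the _*_options helpers in this file follow the same recipe: pop the public keyword arguments, translate leases, modification conditions, and customer-provided keys into generated-model objects (get_access_conditions, get_modify_conditions, CpkInfo), and fold any remaining kwargs into the returned dict. Several of them also convert a caller-facing (offset, length) pair into the end-inclusive index that the service's range headers expect. A minimal standalone sketch of that conversion, with an illustrative name that is not part of the vendored module:

    from typing import Optional, Tuple

    def to_inclusive_range(offset: Optional[int], length: Optional[int]) -> Tuple[Optional[int], Optional[int]]:
        # Service ranges are end-inclusive: offset=0, length=512 covers bytes 0-511.
        if length is not None:
            if offset is None:
                raise ValueError("Offset must be provided if length is provided.")
            return offset, offset + length - 1
        return offset, None

    assert to_inclusive_range(0, 512) == (0, 511)
    assert to_inclusive_range(1024, None) == (1024, None)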
+def _create_snapshot_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + +def _start_copy_from_url_options( # pylint:disable=too-many-statements + source_url: str, + metadata: Optional[Dict[str, str]] = None, + incremental_copy: bool = False, + **kwargs: Any +) -> Dict[str, Any]: + source_url = _encode_source_url(source_url=source_url) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + if 'source_lease' in kwargs: + source_lease = kwargs.pop('source_lease') + try: + headers['x-ms-source-lease-id'] = source_lease.id + except AttributeError: + headers['x-ms-source-lease-id'] = source_lease + + tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None) + tags = kwargs.pop('tags', None) + + # Options only available for sync copy + requires_sync = kwargs.pop('requires_sync', None) + encryption_scope_str = kwargs.pop('encryption_scope', None) + source_authorization = kwargs.pop('source_authorization', None) + # If tags is a str, interpret that as copy_source_tags + copy_source_tags = isinstance(tags, str) + + if incremental_copy: + if source_authorization: + raise ValueError("Source authorization tokens are not applicable for incremental copying.") + if copy_source_tags: + raise ValueError("Copying source tags is not applicable for incremental copying.") + + # TODO: refactor start_copy_from_url api in _blob_client.py. Call _generated/_blob_operations.py copy_from_url + # when requires_sync=True is set. + # Currently both sync copy and async copy are calling _generated/_blob_operations.py start_copy_from_url. + # As sync copy diverges more from async copy, more problems will surface.
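For orientation, the sync-copy options described in this TODO enter through the public start_copy_from_url API and are translated into headers just below. A hedged usage sketch against the public azure.storage.blob package (the connection string, account, container, and blob names are placeholders):

    from azure.storage.blob import BlobClient

    dest = BlobClient.from_connection_string(
        "<connection-string>", container_name="dst", blob_name="copy-target"
    )
    # requires_sync=True is what sets the x-ms-requires-sync header below and
    # unlocks the sync-copy-only options (encryption_scope, source_authorization).
    dest.start_copy_from_url(
        "https://<account>.blob.core.windows.net/src/source-blob",
        requires_sync=True,
    )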
+ if requires_sync is True: + headers['x-ms-requires-sync'] = str(requires_sync) + if encryption_scope_str: + headers['x-ms-encryption-scope'] = encryption_scope_str + if source_authorization: + headers['x-ms-copy-source-authorization'] = source_authorization + if copy_source_tags: + headers['x-ms-copy-source-tag-option'] = tags + else: + if encryption_scope_str: + raise ValueError( + "Encryption_scope is only supported for sync copy, please specify requires_sync=True") + if source_authorization: + raise ValueError( + "Source authorization tokens are only supported for sync copy, please specify requires_sync=True") + if copy_source_tags: + raise ValueError( + "Copying source tags is only supported for sync copy, please specify requires_sync=True") + + timeout = kwargs.pop('timeout', None) + dest_mod_conditions = get_modify_conditions(kwargs) + blob_tags_string = serialize_blob_tags_header(tags) if not copy_source_tags else None + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + options = { + 'copy_source': source_url, + 'seal_blob': kwargs.pop('seal_destination_blob', None), + 'timeout': timeout, + 'modified_access_conditions': dest_mod_conditions, + 'blob_tags_string': blob_tags_string, + 'headers': headers, + 'cls': return_response_headers, + } + if not incremental_copy: + source_mod_conditions = get_source_conditions(kwargs) + dest_access_conditions = get_access_conditions(kwargs.pop('destination_lease', None)) + options['source_modified_access_conditions'] = source_mod_conditions + options['lease_access_conditions'] = dest_access_conditions + options['tier'] = tier.value if tier else None + options.update(kwargs) + return options + +def _abort_copy_options(copy_id: Union[str, Dict[str, Any], BlobProperties], **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + if isinstance(copy_id, BlobProperties): + copy_id = copy_id.copy.id # type: ignore [assignment] + elif isinstance(copy_id, dict): + copy_id = copy_id['copy_id'] + options = { + 'copy_id': copy_id, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None)} + options.update(kwargs) + return options + +def _stage_block_options( + block_id: str, + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: + block_id = encode_base64(str(block_id)) + if isinstance(data, str): + data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + if length is None: + length = get_length(data) + if length is None: + length, data = read_length(data) + if isinstance(data, bytes): + data = data[:length] + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'block_id': block_id, + 'content_length': length, + 'body': data, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + } + 
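One detail in _stage_block_options worth calling out: Put Block requires Base64-encoded block IDs, and the service requires every block ID within a given blob to have the same encoded length, which is why callers typically zero-pad a counter before it reaches encode_base64 above. A standalone sketch with an illustrative helper name:

    from base64 import b64encode

    def make_block_id(index: int, width: int = 6) -> str:
        # Zero-padding keeps every encoded ID the same length within one blob.
        return b64encode(str(index).zfill(width).encode("utf-8")).decode("utf-8")

    ids = [make_block_id(i) for i in range(3)]
    assert len({len(i) for i in ids}) == 1  # uniform encoded length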
options.update(kwargs) + return options + +def _stage_block_from_url_options( + block_id: str, + source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + source_content_md5: Optional[Union[bytes, bytearray]] = None, + **kwargs: Any +) -> Dict[str, Any]: + source_url = _encode_source_url(source_url=source_url) + source_authorization = kwargs.pop('source_authorization', None) + if source_length is not None and source_offset is None: + raise ValueError("Source offset value must not be None if length is set.") + if source_length is not None and source_offset is not None: + source_length = source_offset + source_length - 1 + block_id = encode_base64(str(block_id)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + range_header = None + if source_offset is not None: + range_header, _ = validate_and_format_range_headers(source_offset, source_length) + + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'copy_source_authorization': source_authorization, + 'block_id': block_id, + 'content_length': 0, + 'source_url': source_url, + 'source_range': range_header, + 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + } + options.update(kwargs) + return options + +def _get_block_list_result(blocks: BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]]: + committed = [] + uncommitted = [] + if blocks.committed_blocks: + committed = [BlobBlock._from_generated(b) for b in blocks.committed_blocks] # pylint: disable=protected-access + if blocks.uncommitted_blocks: + uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access + return committed, uncommitted + +def _commit_block_list_options( + block_list: List[BlobBlock], + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any +) -> Dict[str, Any]: + block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) + for block in block_list: + if isinstance(block, BlobBlock): + if block.state.value == 'committed': + cast(List[str], block_lookup.committed).append(encode_base64(str(block.id))) + elif block.state.value == 'uncommitted': + cast(List[str], block_lookup.uncommitted).append(encode_base64(str(block.id))) + elif block_lookup.latest is not None: + block_lookup.latest.append(encode_base64(str(block.id))) + else: + block_lookup.latest.append(encode_base64(str(block))) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + blob_headers = None + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = 
get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + tier = kwargs.pop('standard_blob_tier', None) + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'blocks': block_lookup, + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None), + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'tier': tier.value if tier else None, + 'blob_tags_string': blob_tags_string, + 'headers': headers + } + options.update(kwargs) + return options + +def _set_blob_tags_options( + version_id: Optional[str], + tags: Optional[Dict[str, str]] = None, + **kwargs: Any +)-> Dict[str, Any]: + serialized_tags = serialize_blob_tags(tags) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'tags': serialized_tags, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'version_id': version_id, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _get_blob_tags_options(version_id: Optional[str], snapshot: Optional[str], **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'version_id': version_id, + 'snapshot': snapshot, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'timeout': kwargs.pop('timeout', None), + 'cls': return_headers_and_deserialized} + return options + +def _get_page_ranges_options( + snapshot: Optional[str], + offset: Optional[int] = None, + length: Optional[int] = None, + previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any +) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if length is not None and offset is not None: + length = offset + length - 1 # Reformat to an inclusive range index + page_range, _ = validate_and_format_range_headers( + offset, length, start_range_required=False, end_range_required=False, align_to_page=True + ) + options = { + 'snapshot': snapshot, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'timeout': kwargs.pop('timeout', None), + 'range': page_range} + if previous_snapshot_diff: + try: + options['prevsnapshot'] = previous_snapshot_diff.snapshot # type: ignore + except AttributeError: + try: + options['prevsnapshot'] = previous_snapshot_diff['snapshot'] # type: ignore + except TypeError: + options['prevsnapshot'] = previous_snapshot_diff + options.update(kwargs) + return options + +def _set_sequence_number_options( + sequence_number_action: str, + sequence_number: Optional[str] = None, + **kwargs: Any +) -> Dict[str, Any]: + access_conditions = 
get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if sequence_number_action is None: + raise ValueError("A sequence number action must be specified") + options = { + 'sequence_number_action': sequence_number_action, + 'timeout': kwargs.pop('timeout', None), + 'blob_sequence_number': sequence_number, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _resize_blob_options(size: int, **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if size is None: + raise ValueError("A content length must be specified for a Page Blob.") + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'blob_content_length': size, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _upload_page_options( + page: bytes, + offset: int, + length: int, + **kwargs: Any +) -> Dict[str, Any]: + if isinstance(page, str): + page = page.encode(kwargs.pop('encoding', 'UTF-8')) + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + end_range = offset + length - 1 # Reformat to an inclusive range index + content_range = f'bytes={offset}-{end_range}' # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + validate_content = kwargs.pop('validate_content', False) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': page[:length], + 'content_length': length, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'range': content_range, + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _upload_pages_from_url_options( + source_url: str, + offset: int, + length: int, + source_offset: int, + **kwargs: Any +) -> Dict[str, Any]: + source_url = _encode_source_url(source_url=source_url) + # TODO: extract the code to a method format_range + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + if 
source_offset is None or source_offset % 512 != 0: + raise ValueError("source_offset must be an integer that aligns with 512 page size") + + # Format range + end_range = offset + length - 1 + destination_range = f'bytes={offset}-{end_range}' + source_range = f'bytes={source_offset}-{source_offset + length - 1}' # Reformat to an inclusive range index + + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + source_authorization = kwargs.pop('source_authorization', None) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + source_mod_conditions = get_source_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + source_content_md5 = kwargs.pop('source_content_md5', None) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'source_url': source_url, + 'content_length': 0, + 'source_range': source_range, + 'range': destination_range, + 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': mod_conditions, + 'source_modified_access_conditions': source_mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _clear_page_options( + offset: int, + length: int, + **kwargs: Any +) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + mod_conditions = get_modify_conditions(kwargs) + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + end_range = length + offset - 1 # Reformat to an inclusive range index + content_range = f'bytes={offset}-{end_range}' + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'content_length': 0, + 'timeout': kwargs.pop('timeout', None), + 'range': content_range, + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _append_block_options( + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: + if isinstance(data, str): + data = data.encode(kwargs.pop('encoding', 'UTF-8')) + if length is None: + length = get_length(data) + if length is None: + length, data =
read_length(data) + if length == 0: + return {} + if isinstance(data, bytes): + data = data[:length] + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + validate_content = kwargs.pop('validate_content', False) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': data, + 'content_length': length, + 'timeout': kwargs.pop('timeout', None), + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _append_block_from_url_options( + copy_source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: + copy_source_url = _encode_source_url(source_url=copy_source_url) + # If end range is provided, start range must be provided + if source_length is not None and source_offset is None: + raise ValueError("source_offset should also be specified if source_length is specified") + # Format based on whether length is present + source_range = None + if source_length is not None and source_offset is not None: + end_range = source_offset + source_length - 1 + source_range = f'bytes={source_offset}-{end_range}' + elif source_offset is not None: + source_range = f"bytes={source_offset}-" + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + source_content_md5 = kwargs.pop('source_content_md5', None) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + source_authorization = kwargs.pop('source_authorization', None) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + source_mod_conditions = get_source_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'source_url': copy_source_url, + 'content_length': 0, + 'source_range': source_range, + 'source_content_md5': source_content_md5, + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'source_modified_access_conditions': source_mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'timeout': kwargs.pop('timeout', None)} + 
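The appendpos_condition and maxsize_condition values packed into AppendPositionAccessConditions in the two append helpers above correspond to two server-side preconditions: the append succeeds only if the blob's current length equals the expected append position, and only if the resulting blob stays within the declared maximum size. A rough local sketch of those semantics (the function and error strings are illustrative labels, not the service's actual error codes):

    def append_preconditions(current_length: int, incoming: int,
                             append_position=None, max_size=None) -> None:
        # Both checks are optional, matching AppendPositionAccessConditions.
        if append_position is not None and current_length != append_position:
            raise ValueError("append position condition not met")
        if max_size is not None and current_length + incoming > max_size:
            raise ValueError("max blob size condition not met")

    append_preconditions(1024, 512, append_position=1024, max_size=4096)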
options.update(kwargs) + return options + +def _seal_append_blob_options(**kwargs: Any) -> Dict[str, Any]: + appendpos_condition = kwargs.pop('appendpos_condition', None) + append_conditions = None + if appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _from_blob_url( + blob_url: str, + snapshot: Optional[Union[BlobProperties, str, Dict[str, Any]]] +) -> Tuple[str, str, str, Optional[str]]: + try: + if not blob_url.lower().startswith('http'): + blob_url = "https://" + blob_url + except AttributeError as exc: + raise ValueError("Blob URL must be a string.") from exc + parsed_url = urlparse(blob_url.rstrip('/')) + + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {blob_url}") + + account_path = "" + if ".core." in parsed_url.netloc: + # .core. is indicating non-customized url. Blob name with directory info can also be parsed. + path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=1) + elif "localhost" in parsed_url.netloc or "127.0.0.1" in parsed_url.netloc: + path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=2) + account_path += '/' + path_blob[0] + else: + # for customized url. blob name that has directory info cannot be parsed. + path_blob = parsed_url.path.lstrip('/').split('/') + if len(path_blob) > 2: + account_path = "/" + "/".join(path_blob[:-2]) + + account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" + + msg_invalid_url = "Invalid URL. Provide a blob_url with a valid blob and container name." + if len(path_blob) <= 1: + raise ValueError(msg_invalid_url) + container_name, blob_name = unquote(path_blob[-2]), unquote(path_blob[-1]) + if not container_name or not blob_name: + raise ValueError(msg_invalid_url) + + path_snapshot, _ = parse_query(parsed_url.query) + if snapshot: + if isinstance(snapshot, BlobProperties): + path_snapshot = snapshot.snapshot + elif isinstance(snapshot, dict): + path_snapshot = snapshot['snapshot'] + else: + path_snapshot = snapshot + return (account_url, container_name, blob_name, path_snapshot) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client.py index 8658363f8248..f6e17cb756f0 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client.py @@ -3,56 +3,55 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, +import warnings +from typing import ( + Any, Dict, List, Optional, Union, TYPE_CHECKING ) +from typing_extensions import Self - -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse # type: ignore - -from azure.core.paging import ItemPaged from azure.core.exceptions import HttpResponseError +from azure.core.paging import ItemPaged from azure.core.pipeline import Pipeline from azure.core.tracing.decorator import distributed_trace - -from ._shared.models import LocationMode -from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query -from ._shared.parser import _to_utc_datetime -from ._shared.response_handlers import return_response_headers, process_storage_error, \ - parse_to_internal_user_delegation_key -from ._generated import AzureBlobStorage -from ._generated.models import StorageServiceProperties, KeyInfo -from ._container_client import ContainerClient from ._blob_client import BlobClient -from ._models import ContainerPropertiesPaged +from ._blob_service_client_helpers import _parse_url +from ._container_client import ContainerClient +from ._deserialize import service_properties_deserialize, service_stats_deserialize +from ._encryption import StorageEncryptionMixin +from ._generated import AzureBlobStorage +from ._generated.models import KeyInfo, StorageServiceProperties from ._list_blobs_helper import FilteredBlobPaged +from ._models import BlobProperties, ContainerProperties, ContainerPropertiesPaged, CorsRule from ._serialize import get_api_version -from ._deserialize import service_stats_deserialize, service_properties_deserialize +from ._shared.base_client import parse_connection_str, parse_query, StorageAccountHostsMixin, TransportWrapper +from ._shared.models import LocationMode +from ._shared.parser import _to_utc_datetime +from ._shared.response_handlers import ( + parse_to_internal_user_delegation_key, + process_storage_error, + return_response_headers +) if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential from datetime import datetime - from ._shared.models import UserDelegationKey from ._lease import BlobLeaseClient from ._models import ( - ContainerProperties, - BlobProperties, - PublicAccess, BlobAnalyticsLogging, + FilteredBlob, Metrics, - CorsRule, + PublicAccess, RetentionPolicy, - StaticWebsite, - FilteredBlob + StaticWebsite ) + from ._shared.models import UserDelegationKey -class BlobServiceClient(StorageAccountHostsMixin): +class BlobServiceClient(StorageAccountHostsMixin, StorageEncryptionMixin): """A client to interact with the Blob Service at the account level. This client provides operations to retrieve and configure the account properties @@ -60,6 +59,10 @@ class BlobServiceClient(StorageAccountHostsMixin): For operations relating to a specific container or blob, clients for those entities can also be retrieved using the `get_client` functions. + For more optional configuration, please click + `here `__. + :param str account_url: The URL to the blob storage account. Any other entities included in the URL path (e.g. container or blob) will be discarded. 
This URL can be optionally @@ -67,13 +70,15 @@ class BlobServiceClient(StorageAccountHostsMixin): :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -92,6 +97,9 @@ class BlobServiceClient(StorageAccountHostsMixin): the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -107,43 +115,39 @@ class BlobServiceClient(StorageAccountHostsMixin): :end-before: [END create_blob_service_client_oauth] :language: python :dedent: 8 - :caption: Creating the BlobServiceClient with Azure Identity credentials. + :caption: Creating the BlobServiceClient with Default Azure Identity credentials. """ def __init__( - self, account_url, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...)
-> None - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Account URL must be a string.") - parsed_url = urlparse(account_url.rstrip('/')) - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(account_url)) - + self, account_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: + parsed_url, sas_token = _parse_url(account_url=account_url) _, sas_token = parse_query(parsed_url.query) self._query_str, credential = self._format_query_string(sas_token, credential) super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) def _format_url(self, hostname): """Format the endpoint URL according to the current location mode hostname. + + :param str hostname: + The hostname of the current location mode. + :returns: A formatted endpoint URL including current location mode hostname. + :rtype: str """ - return "{}://{}/{}".format(self.scheme, hostname, self._query_str) + return f"{self.scheme}://{hostname}/{self._query_str}" @classmethod def from_connection_string( - cls, conn_str, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): # type: (...) -> BlobServiceClient + cls, conn_str: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create BlobServiceClient from a Connection String. :param str conn_str: @@ -152,9 +156,19 @@ def from_connection_string( The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A Blob service client.
:rtype: ~azure.storage.blob.BlobServiceClient @@ -173,11 +187,11 @@ def from_connection_string( return cls(account_url, credential=credential, **kwargs) @distributed_trace - def get_user_delegation_key(self, key_start_time, # type: datetime - key_expiry_time, # type: datetime - **kwargs # type: Any - ): - # type: (...) -> UserDelegationKey + def get_user_delegation_key( + self, key_start_time: "datetime", + key_expiry_time: "datetime", + **kwargs: Any + ) -> "UserDelegationKey": """ Obtain a user delegation key for the purpose of signing SAS tokens. A token credential must be present on the service object for this request to succeed. @@ -187,8 +201,12 @@ def get_user_delegation_key(self, key_start_time, # type: datetime :param ~datetime.datetime key_expiry_time: A DateTime value. Indicates when the key stops being valid. :keyword int timeout: - The timeout parameter is expressed in seconds. - :return: The user delegation key. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: The user delegation key. :rtype: ~azure.storage.blob.UserDelegationKey """ key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) @@ -203,8 +221,7 @@ def get_user_delegation_key(self, key_start_time, # type: datetime return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore @distributed_trace - def get_account_information(self, **kwargs): - # type: (Any) -> Dict[str, str] + def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account. The information can also be retrieved if the user has a SAS to a container or blob. @@ -228,8 +245,7 @@ def get_account_information(self, **kwargs): process_storage_error(error) @distributed_trace - def get_service_stats(self, **kwargs): - # type: (**Any) -> Dict[str, Any] + def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: """Retrieves statistics related to replication for the Blob service. It is only available when read-access geo-redundant replication is enabled for @@ -249,8 +265,12 @@ def get_service_stats(self, **kwargs): replication is enabled for your storage account. :keyword int timeout: - The timeout parameter is expressed in seconds. - :return: The blob service stats. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: The blob service stats. :rtype: Dict[str, Any] .. admonition:: Example: @@ -271,13 +291,16 @@ def get_service_stats(self, **kwargs): process_storage_error(error) @distributed_trace - def get_service_properties(self, **kwargs): - # type: (Any) -> Dict[str, Any] + def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: """Gets the properties of a storage account's Blob service, including Azure Storage Analytics. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: An object containing blob service properties such as analytics logging, hour/minute metrics, cors rules, etc. :rtype: Dict[str, Any] @@ -300,16 +323,15 @@ def get_service_properties(self, **kwargs): @distributed_trace def set_service_properties( - self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging] - hour_metrics=None, # type: Optional[Metrics] - minute_metrics=None, # type: Optional[Metrics] - cors=None, # type: Optional[List[CorsRule]] - target_version=None, # type: Optional[str] - delete_retention_policy=None, # type: Optional[RetentionPolicy] - static_website=None, # type: Optional[StaticWebsite] - **kwargs - ): - # type: (...) -> None + self, analytics_logging: Optional["BlobAnalyticsLogging"] = None, + hour_metrics: Optional["Metrics"] = None, + minute_metrics: Optional["Metrics"] = None, + cors: Optional[List[CorsRule]] = None, + target_version: Optional[str] = None, + delete_retention_policy: Optional["RetentionPolicy"] = None, + static_website: Optional["StaticWebsite"] = None, + **kwargs: Any + ) -> None: """Sets the properties of a storage account's Blob service, including Azure Storage Analytics. @@ -344,7 +366,11 @@ def set_service_properties( and if yes, indicates the index document and 404 error document to use. :type static_website: ~azure.storage.blob.StaticWebsite :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None .. admonition:: Example: @@ -365,7 +391,7 @@ def set_service_properties( logging=analytics_logging, hour_metrics=hour_metrics, minute_metrics=minute_metrics, - cors=cors, + cors=CorsRule._to_generated(cors), # pylint: disable=protected-access default_service_version=target_version, delete_retention_policy=delete_retention_policy, static_website=static_website @@ -378,11 +404,10 @@ def set_service_properties( @distributed_trace def list_containers( - self, name_starts_with=None, # type: Optional[str] - include_metadata=False, # type: Optional[bool] - **kwargs - ): - # type: (...) -> ItemPaged[ContainerProperties] + self, name_starts_with: Optional[str] = None, + include_metadata: bool = False, + **kwargs: Any + ) -> ItemPaged[ContainerProperties]: """Returns a generator to list the containers under the specified account. The generator will lazily follow the continuation tokens returned by @@ -398,11 +423,18 @@ def list_containers( Specifies that deleted containers to be returned in the response. This is for container restore enabled account. The default value is `False`. .. versionadded:: 12.4.0 + :keyword bool include_system: + Flag specifying that system containers should be included. + .. versionadded:: 12.10.0 :keyword int results_per_page: The maximum number of container names to retrieve per API call. If the request does not specify the server will return up to 5,000 items. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. 
To configure client-side network timeouts
+            see `here `__.
        :returns: An iterable (auto-paging) of ContainerProperties.
        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.ContainerProperties]
@@ -419,6 +451,9 @@ def list_containers(
        include_deleted = kwargs.pop('include_deleted', None)
        if include_deleted:
            include.append("deleted")
+        include_system = kwargs.pop('include_system', None)
+        if include_system:
+            include.append("system")
        timeout = kwargs.pop('timeout', None)
        results_per_page = kwargs.pop('results_per_page', None)
@@ -436,8 +471,7 @@ def list_containers(
        )

    @distributed_trace
-    def find_blobs_by_tags(self, filter_expression, **kwargs):
-        # type: (str, **Any) -> ItemPaged[FilteredBlob]
+    def find_blobs_by_tags(self, filter_expression: str, **kwargs: Any) -> ItemPaged["FilteredBlob"]:
        """The Filter Blobs operation enables callers to list blobs across all
        containers whose tags match a given search expression. Filter blobs
        searches across all containers within a storage account but can be
@@ -450,7 +484,11 @@ def find_blobs_by_tags(self, filter_expression, **kwargs):
        :keyword int results_per_page:
            The max result per page when paginating.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: An iterable (auto-paging) response of FilteredBlob.
        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob]
        """
@@ -468,12 +506,11 @@ def find_blobs_by_tags(self, filter_expression, **kwargs):

    @distributed_trace
    def create_container(
-        self, name,  # type: str
-        metadata=None,  # type: Optional[Dict[str, str]]
-        public_access=None,  # type: Optional[Union[PublicAccess, str]]
-        **kwargs
-    ):
-        # type: (...) -> ContainerClient
+        self, name: str,
+        metadata: Optional[Dict[str, str]] = None,
+        public_access: Optional[Union["PublicAccess", str]] = None,
+        **kwargs: Any
+    ) -> ContainerClient:
        """Creates a new container under the specified account.

        If the container with the same name already exists, a ResourceExistsError will
@@ -496,7 +533,12 @@ def create_container(
        :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
+        :returns: A container client to interact with the newly created container.
        :rtype: ~azure.storage.blob.ContainerClient

        .. admonition:: Example:
@@ -517,11 +559,10 @@ def create_container(

    @distributed_trace
    def delete_container(
-        self, container,  # type: Union[ContainerProperties, str]
-        lease=None,  # type: Optional[Union[BlobLeaseClient, str]]
-        **kwargs
-    ):
-        # type: (...) -> None
+        self, container: Union[ContainerProperties, str],
+        lease: Optional[Union["BlobLeaseClient", str]] = None,
+        **kwargs: Any
+    ) -> None:
        """Marks the specified container for deletion.

        The container and any blobs contained within it are later deleted during garbage collection.
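
Illustrative usage sketch, not part of the patch: the service-level hunks above document list_containers (with the new include_system flag), find_blobs_by_tags, and create_container. The snippet below shows how they compose, written against the public azure.storage.blob surface that this vendored copy mirrors; the connection string, name prefix, and tag expression are placeholder values.

    from azure.storage.blob import BlobServiceClient

    # Placeholder connection string; supply a real one from the portal.
    service = BlobServiceClient.from_connection_string("<connection-string>")

    # Containers can be filtered by name prefix; metadata and system
    # containers are opt-in via keyword flags.
    for container in service.list_containers(name_starts_with="checkpoint",
                                             include_metadata=True,
                                             include_system=True):
        print(container.name, container.metadata)

    # Tag filtering spans every container in the account.
    for blob in service.find_blobs_by_tags("\"project\"='eventhub'"):
        print(blob.container_name, blob.name)
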
@@ -535,7 +576,7 @@ def delete_container( If specified, delete_container only succeeds if the container's lease is active and matches this ID. Required if the container has an active lease. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :type lease: ~azure.storage.blob.BlobLeaseClient or str :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -554,8 +595,11 @@ def delete_container( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. .. admonition:: Example: @@ -566,17 +610,55 @@ def delete_container( :dedent: 12 :caption: Deleting a container in the blob service. """ - container = self.get_container_client(container) # type: ignore + container_client = self.get_container_client(container) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) - container.delete_container( # type: ignore + container_client.delete_container( lease=lease, timeout=timeout, **kwargs) @distributed_trace - def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs): - # type: (str, str, str, **Any) -> ContainerClient + def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> ContainerClient: + """Renames a container. + + Operation is successful only if the source container exists. + + :param str name: + The name of the container to rename. + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: A container client for the renamed container. + :rtype: ~azure.storage.blob.ContainerClient + """ + renamed_container = self.get_container_client(new_name) + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def undelete_container( + self, deleted_container_name: str, + deleted_container_version: str, + **kwargs: Any + ) -> ContainerClient: """Restores soft-deleted container. Operation will only be successful if used within the specified number of days @@ -589,14 +671,18 @@ def undelete_container(self, deleted_container_name, deleted_container_version, Specifies the name of the deleted container to restore. :param str deleted_container_version: Specifies the version of the deleted container to restore. 
- :keyword str new_name: - The new name for the deleted container to be restored to. - If not specified deleted_container_name will be used as the restored container name. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: The undeleted ContainerClient. :rtype: ~azure.storage.blob.ContainerClient """ new_name = kwargs.pop('new_name', None) + if new_name: + warnings.warn("`new_name` is no longer supported.", DeprecationWarning) container = self.get_container_client(new_name or deleted_container_name) try: container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access @@ -606,8 +692,7 @@ def undelete_container(self, deleted_container_name, deleted_container_version, except HttpResponseError as error: process_storage_error(error) - def get_container_client(self, container): - # type: (Union[ContainerProperties, str]) -> ContainerClient + def get_container_client(self, container: Union[ContainerProperties, str]) -> ContainerClient: """Get a client to interact with the specified container. The container need not already exist. @@ -628,9 +713,9 @@ def get_container_client(self, container): :dedent: 8 :caption: Getting the container client to interact with a specific container. """ - try: + if isinstance(container, ContainerProperties): container_name = container.name - except AttributeError: + else: container_name = container _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access @@ -640,15 +725,16 @@ def get_container_client(self, container): self.url, container_name=container_name, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) def get_blob_client( - self, container, # type: Union[ContainerProperties, str] - blob, # type: Union[BlobProperties, str] - snapshot=None # type: Optional[Union[Dict[str, Any], str]] - ): - # type: (...) -> BlobClient + self, container: Union[ContainerProperties, str], + blob: str, + snapshot: Optional[Union[Dict[str, Any], str]] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. @@ -657,14 +743,13 @@ def get_blob_client( The container that the blob is in. This can either be the name of the container, or an instance of ContainerProperties. :type container: str or ~azure.storage.blob.ContainerProperties - :param blob: - The blob with which to interact. This can either be the name of the blob, - or an instance of BlobProperties. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The name of the blob with which to interact. :param snapshot: The optional blob snapshot on which to operate. 
This can either be the ID of the snapshot, or a dictionary output returned by :func:`~azure.storage.blob.BlobClient.create_snapshot()`. :type snapshot: str or dict(str, Any) + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. :returns: A BlobClient. :rtype: ~azure.storage.blob.BlobClient @@ -677,21 +762,27 @@ def get_blob_client( :dedent: 12 :caption: Getting the blob client to interact with a specific blob. """ - try: - container_name = container.name - except AttributeError: - container_name = container - try: + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_name = blob.name - except AttributeError: + else: blob_name = blob + if isinstance(container, ContainerProperties): + container_name = container.name + else: + container_name = container _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access policies=self._pipeline._impl_policies # pylint: disable = protected-access ) - return BlobClient( # type: ignore + return BlobClient( self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client_helpers.py new file mode 100644 index 000000000000..d2de950b7c83 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_blob_service_client_helpers.py @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +from typing import Any, Tuple, TYPE_CHECKING +from urllib.parse import urlparse +from ._shared.base_client import parse_query + +if TYPE_CHECKING: + from urllib.parse import ParseResult + + +def _parse_url(account_url: str) -> Tuple["ParseResult", Any]: + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError as exc: + raise ValueError("Account URL must be a string.") from exc + parsed_url = urlparse(account_url.rstrip('/')) + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {account_url}") + + _, sas_token = parse_query(parsed_url.query) + + return parsed_url, sas_token diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client.py index 8788d14b704a..783df6bc753e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client.py @@ -1,81 +1,82 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, AnyStr, Dict, List, Tuple, IO, Iterator, +import warnings +from datetime import datetime +from typing import ( + Any, AnyStr, cast, Dict, List, IO, Iterable, Iterator, Optional, overload, Union, TYPE_CHECKING ) +from urllib.parse import unquote, urlparse +from typing_extensions import Self - -try: - from urllib.parse import urlparse, quote, unquote -except ImportError: - from urlparse import urlparse # type: ignore - from urllib2 import quote, unquote # type: ignore - -import six - -from azure.core import MatchConditions -from azure.core.exceptions import HttpResponseError +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError from azure.core.paging import ItemPaged -from azure.core.tracing.decorator import distributed_trace from azure.core.pipeline import Pipeline -from azure.core.pipeline.transport import HttpRequest - -from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query -from ._shared.request_handlers import add_metadata_headers, serialize_iso -from ._shared.response_handlers import ( - process_storage_error, - return_response_headers, - return_headers_and_deserialized) +from azure.core.tracing.decorator import distributed_trace +from ._blob_client import BlobClient +from ._container_client_helpers import ( + _format_url, + _generate_delete_blobs_options, + _generate_set_tiers_options, + _parse_url +) +from ._deserialize import deserialize_container_properties +from ._download import StorageStreamDownloader +from ._encryption import StorageEncryptionMixin from ._generated import AzureBlobStorage from ._generated.models import 
SignedIdentifier -from ._deserialize import deserialize_container_properties -from ._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions -from ._models import ( # pylint: disable=unused-import - ContainerProperties, - BlobProperties, - BlobType) -from ._list_blobs_helper import BlobPrefix, BlobPropertiesPaged from ._lease import BlobLeaseClient -from ._blob_client import BlobClient +from ._list_blobs_helper import ( + BlobNamesPaged, + BlobPrefix, + BlobPropertiesPaged, + FilteredBlobPaged, + IgnoreListBlobsDeserializer +) +from ._models import ( + BlobProperties, + BlobType, + ContainerProperties, + FilteredBlob +) +from ._serialize import get_access_conditions, get_api_version, get_container_cpk_scope_info, get_modify_conditions +from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper +from ._shared.request_handlers import add_metadata_headers, serialize_iso +from ._shared.response_handlers import ( + process_storage_error, + return_headers_and_deserialized, + return_response_headers +) if TYPE_CHECKING: - from azure.core.pipeline.transport import HttpTransport, HttpResponse # pylint: disable=ungrouped-imports - from azure.core.pipeline.policies import HTTPPolicy # pylint: disable=ungrouped-imports - from datetime import datetime - from ._models import ( # pylint: disable=unused-import - PublicAccess, + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from azure.core.pipeline.transport import HttpResponse # pylint: disable=C4756 + from azure.storage.blob import BlobServiceClient + from ._models import ( AccessPolicy, - ContentSettings, - StandardBlobTier, - PremiumPageBlobTier) - - -def _get_blob_name(blob): - """Return the blob name. - - :param blob: A blob string or BlobProperties - :rtype: str - """ - try: - return blob.get('name') - except AttributeError: - return blob + PremiumPageBlobTier, + PublicAccess, + StandardBlobTier + ) -class ContainerClient(StorageAccountHostsMixin): +class ContainerClient(StorageAccountHostsMixin, StorageEncryptionMixin): # pylint: disable=too-many-public-methods """A client to interact with a specific container, although that container may not yet exist. For operations relating to a specific blob within this container, a blob client can be retrieved using the :func:`~get_blob_client` function. + For more optional configuration, please click + `here `__. + :param str account_url: The URI to the storage account. In order to create a client given the full URI to the container, use the :func:`from_container_url` classmethod. @@ -85,13 +86,15 @@ class ContainerClient(StorageAccountHostsMixin): :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. 
+ If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -110,6 +113,9 @@ class ContainerClient(StorageAccountHostsMixin): the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -128,44 +134,41 @@ class ContainerClient(StorageAccountHostsMixin): :caption: Creating the container client directly. """ def __init__( - self, account_url, # type: str - container_name, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Container URL must be a string.") - parsed_url = urlparse(account_url.rstrip('/')) - if not container_name: - raise ValueError("Please specify a container name.") - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(account_url)) + self, account_url: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: + parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name) - _, sas_token = parse_query(parsed_url.query) self.container_name = container_name + # This parameter is used for the hierarchy traversal. Give precedence to credential. 
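+        # (_parse_url above already extracted any SAS token from the account URL;
+        # the raw value is kept as a fallback credential so that
+        # _get_blob_service_client() can later rebuild a service-level client
+        # with the same authentication.)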
+ self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential) super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._api_version = get_api_version(kwargs) + self._client = self._build_generated_client() + self._configure_encryption(kwargs) + + def _build_generated_client(self) -> AzureBlobStorage: + client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access + return client def _format_url(self, hostname): - container_name = self.container_name - if isinstance(container_name, six.text_type): - container_name = container_name.encode('UTF-8') - return "{}://{}/{}{}".format( - self.scheme, - hostname, - quote(container_name), - self._query_str) + return _format_url( + container_name=self.container_name, + hostname=hostname, + scheme=self.scheme, + query_str=self._query_str + ) @classmethod - def from_container_url(cls, container_url, credential=None, **kwargs): - # type: (str, Optional[Any], Any) -> ContainerClient + def from_container_url( + cls, container_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create ContainerClient from a container url. :param str container_url: @@ -176,31 +179,37 @@ def from_container_url(cls, container_url, credential=None, **kwargs): The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A container client. 
:rtype: ~azure.storage.blob.ContainerClient """ try: if not container_url.lower().startswith('http'): container_url = "https://" + container_url - except AttributeError: - raise ValueError("Container URL must be a string.") - parsed_url = urlparse(container_url.rstrip('/')) + except AttributeError as exc: + raise ValueError("Container URL must be a string.") from exc + parsed_url = urlparse(container_url) if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(container_url)) + raise ValueError(f"Invalid URL: {container_url}") - container_path = parsed_url.path.lstrip('/').split('/') + container_path = parsed_url.path.strip('/').split('/') account_path = "" if len(container_path) > 1: account_path = "/" + "/".join(container_path[:-1]) - account_url = "{}://{}{}?{}".format( - parsed_url.scheme, - parsed_url.netloc.rstrip('/'), - account_path, - parsed_url.query) + account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" container_name = unquote(container_path[-1]) if not container_name: raise ValueError("Invalid URL. Please provide a URL with a valid container name") @@ -208,11 +217,11 @@ def from_container_url(cls, container_url, credential=None, **kwargs): @classmethod def from_connection_string( - cls, conn_str, # type: str - container_name, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): # type: (...) -> ContainerClient + cls, conn_str: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create ContainerClient from a Connection String. :param str conn_str: @@ -224,9 +233,19 @@ def from_connection_string( The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A container client. 
:rtype: ~azure.storage.blob.ContainerClient @@ -246,8 +265,11 @@ def from_connection_string( account_url, container_name=container_name, credential=credential, **kwargs) @distributed_trace - def create_container(self, metadata=None, public_access=None, **kwargs): - # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> None + def create_container( + self, metadata: Optional[Dict[str, str]] = None, + public_access: Optional[Union["PublicAccess", str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, "datetime"]]: """ Creates a new container under the specified account. If the container with the same name already exists, the operation fails. @@ -266,8 +288,13 @@ def create_container(self, metadata=None, public_access=None, **kwargs): :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: A dictionary of response headers. + :rtype: Dict[str, Union[str, datetime]] .. admonition:: Example: @@ -294,9 +321,45 @@ def create_container(self, metadata=None, public_access=None, **kwargs): process_storage_error(error) @distributed_trace - def delete_container( - self, **kwargs): - # type: (Any) -> None + def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": + """Renames a container. + + Operation is successful only if the source container exists. + + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :type lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: The renamed container client. + :rtype: ~azure.storage.blob.ContainerClient + """ + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container = ContainerClient( + f"{self.scheme}://{self.primary_hostname}", container_name=new_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) + renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def delete_container(self, **kwargs: Any) -> None: """ Marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. 
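
Illustrative usage sketch, not part of the patch: the constructor hunks above give ContainerClient three equivalent entry points. Written against the public azure.storage.blob surface this vendored copy mirrors; URLs, tokens, and names below are placeholders.

    from azure.storage.blob import ContainerClient

    # From a full container URL (a SAS token may ride along in the query string).
    client = ContainerClient.from_container_url(
        "https://<account>.blob.core.windows.net/my-container?<sas-token>")

    # From the account URL plus an explicit container name and credential.
    client = ContainerClient(
        "https://<account>.blob.core.windows.net",
        container_name="my-container",
        credential="<sas-token-or-account-key>")

    # From a connection string.
    client = ContainerClient.from_connection_string(
        "<connection-string>", container_name="my-container")

    # Per the annotation change above, create_container now returns the
    # operation's response headers (ETag, Last-Modified) instead of None.
    headers = client.create_container()
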
@@ -324,7 +387,11 @@ def delete_container( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None .. admonition:: Example: @@ -351,10 +418,10 @@ def delete_container( @distributed_trace def acquire_lease( - self, lease_duration=-1, # type: int - lease_id=None, # type: Optional[str] - **kwargs): - # type: (...) -> BlobLeaseClient + self, lease_duration: int =-1, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> BlobLeaseClient: """ Requests a new lease. If the container does not have an active lease, the Blob service creates a lease on the container and returns a new @@ -386,7 +453,11 @@ def acquire_lease( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A BlobLeaseClient object, that can be run in a context manager. :rtype: ~azure.storage.blob.BlobLeaseClient @@ -406,8 +477,7 @@ def acquire_lease( return lease @distributed_trace - def get_account_information(self, **kwargs): - # type: (**Any) -> Dict[str, str] + def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account. The information can also be retrieved if the user has a SAS to a container or blob. @@ -422,8 +492,7 @@ def get_account_information(self, **kwargs): process_storage_error(error) @distributed_trace - def get_container_properties(self, **kwargs): - # type: (Any) -> ContainerProperties + def get_container_properties(self, **kwargs: Any) -> ContainerProperties: """Returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. @@ -432,7 +501,11 @@ def get_container_properties(self, **kwargs): container's lease is active and matches this ID. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :return: Properties for the specified container within a container object. :rtype: ~azure.storage.blob.ContainerProperties @@ -460,11 +533,33 @@ def get_container_properties(self, **kwargs): return response # type: ignore @distributed_trace - def set_container_metadata( # type: ignore - self, metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def exists(self, **kwargs: Any) -> bool: + """ + Returns True if a container exists and returns False otherwise. 
+
+        :keyword int timeout:
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
+        :returns: True if the container exists, False otherwise.
+        :rtype: bool
+        """
+        try:
+            self._client.container.get_properties(**kwargs)
+            return True
+        except HttpResponseError as error:
+            try:
+                process_storage_error(error)
+            except ResourceNotFoundError:
+                return False
+
+    @distributed_trace
+    def set_container_metadata(
+        self, metadata: Optional[Dict[str, str]] = None,
+        **kwargs: Any
+    ) -> Dict[str, Union[str, "datetime"]]:
        """Sets one or more user-defined name-value pairs for the specified
        container. Each call to this operation replaces all existing metadata
        attached to the container. To remove all metadata from the container,
@@ -494,7 +589,11 @@ def set_container_metadata(  # type: ignore
            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
            and act according to the condition specified by the `match_condition` parameter.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: Container-updated property dict (Etag and last modified).
        :rtype: dict[str, str or datetime]
@@ -525,8 +624,40 @@ def set_container_metadata(  # type: ignore
            process_storage_error(error)

    @distributed_trace
-    def get_container_access_policy(self, **kwargs):
-        # type: (Any) -> Dict[str, Any]
+    def _get_blob_service_client(self) -> "BlobServiceClient":
+        """Get a client to interact with the container's parent service account.
+
+        Defaults to current container's credentials.
+
+        :returns: A BlobServiceClient.
+        :rtype: ~azure.storage.blob.BlobServiceClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_service.py
+                :start-after: [START get_blob_service_client_from_container_client]
+                :end-before: [END get_blob_service_client_from_container_client]
+                :language: python
+                :dedent: 8
+                :caption: Get blob service client from container object.
+        """
+        from ._blob_service_client import BlobServiceClient
+        if not isinstance(self._pipeline._transport, TransportWrapper):  # pylint: disable = protected-access
+            _pipeline = Pipeline(
+                transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
+                policies=self._pipeline._impl_policies  # pylint: disable = protected-access
+            )
+        else:
+            _pipeline = self._pipeline
+        return BlobServiceClient(
+            f"{self.scheme}://{self.primary_hostname}",
+            credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
+            _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption,
+            encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key,
+            key_resolver_function=self.key_resolver_function, _pipeline=_pipeline)
+
+    @distributed_trace
+    def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]:
        """Gets the permissions for the specified container.
        The permissions indicate whether container data may be accessed publicly.
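
Illustrative usage sketch, not part of the patch: the new exists() helper above converts only ResourceNotFoundError into False and re-raises everything else through process_storage_error, which makes create-if-missing flows straightforward. Same assumptions as the earlier sketches (public azure.storage.blob surface, placeholder values).

    from azure.storage.blob import ContainerClient

    client = ContainerClient.from_connection_string(
        "<connection-string>", container_name="checkpoints")

    # exists() returns False only on a 404; auth or network failures still raise.
    if not client.exists():
        client.create_container()

    # set_container_metadata replaces, rather than merges, all user-defined
    # metadata currently on the container.
    client.set_container_metadata({"owner": "eventhub", "env": "dev"})
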
@@ -535,7 +666,11 @@ def get_container_access_policy(self, **kwargs): container's lease is active and matches this ID. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Access policy information in a dict. :rtype: dict[str, Any] @@ -566,10 +701,10 @@ def get_container_access_policy(self, **kwargs): @distributed_trace def set_container_access_policy( - self, signed_identifiers, # type: Dict[str, AccessPolicy] - public_access=None, # type: Optional[Union[str, PublicAccess]] - **kwargs - ): # type: (...) -> Dict[str, Union[str, datetime]] + self, signed_identifiers: Dict[str, "AccessPolicy"], + public_access: Optional[Union[str, "PublicAccess"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets the permissions for the specified container or stored access policies that may be used with Shared Access Signatures. The permissions indicate whether blobs in a container may be accessed publicly. @@ -598,7 +733,11 @@ def set_container_access_policy( Specify this header to perform the operation only if the resource has not been modified since the specified date/time. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Container-updated property dict (Etag and last modified). :rtype: dict[str, str or ~datetime.datetime] @@ -627,20 +766,23 @@ def set_container_access_policy( access_conditions = get_access_conditions(lease) timeout = kwargs.pop('timeout', None) try: - return self._client.container.set_access_policy( + return cast(Dict[str, Union[str, datetime]], self._client.container.set_access_policy( container_acl=signed_identifiers or None, timeout=timeout, access=public_access, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=return_response_headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def list_blobs(self, name_starts_with=None, include=None, **kwargs): - # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> ItemPaged[BlobProperties] + def list_blobs( + self, name_starts_with: Optional[str] = None, + include: Optional[Union[str, List[str]]] = None, + **kwargs: Any + ) -> ItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. @@ -648,11 +790,17 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :param str name_starts_with: Filters the results to return only blobs whose names begin with the specified prefix. - :param list[str] or str include: + :param include: Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'tags'. 
+ Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :type include: list[str] or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] @@ -665,6 +813,10 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :dedent: 8 :caption: List the blobs in the container. """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + if include and not isinstance(include, list): include = [include] @@ -676,17 +828,62 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): timeout=timeout, **kwargs) return ItemPaged( - command, prefix=name_starts_with, results_per_page=results_per_page, + command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, page_iterator_class=BlobPropertiesPaged) + @distributed_trace + def list_blob_names(self, **kwargs: Any) -> ItemPaged[str]: + """Returns a generator to list the names of blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. + + Note that no additional properties or metadata will be returned when using this API. + Additionally, this API does not have an option to include additional blobs such as snapshots, + versions, soft-deleted blobs, etc. To get any of this data, use :func:`list_blobs()`. + + :keyword str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: An iterable (auto-paging) response of blob names as strings. + :rtype: ~azure.core.paging.ItemPaged[str] + """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + + name_starts_with = kwargs.pop('name_starts_with', None) + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + + # For listing only names we need to create a one-off generated client and + # override its deserializer to prevent deserialization of the full response. 
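+        # (Without the override, the generated client would deserialize every
+        # list entry into a full BlobProperties object, which defeats the
+        # purpose of the names-only fast path.)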
+ client = self._build_generated_client() + client.container._deserialize = IgnoreListBlobsDeserializer() # pylint: disable=protected-access + + command = functools.partial( + client.container.list_blob_flat_segment, + timeout=timeout, + **kwargs) + return ItemPaged( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + container=self.container_name, + page_iterator_class=BlobNamesPaged) + @distributed_trace def walk_blobs( - self, name_starts_with=None, # type: Optional[str] - include=None, # type: Optional[Any] - delimiter="/", # type: str - **kwargs # type: Optional[Any] - ): - # type: (...) -> ItemPaged[BlobProperties] + self, name_starts_with: Optional[str] = None, + include: Optional[Union[List[str], str]] = None, + delimiter: str = "/", + **kwargs: Any + ) -> ItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. This operation will list blobs in accordance with a hierarchy, @@ -695,19 +892,29 @@ def walk_blobs( :param str name_starts_with: Filters the results to return only blobs whose names begin with the specified prefix. - :param list[str] include: + :param include: Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :type include: list[str] or str :param str delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose names begin with the same substring up to the appearance of the delimiter character. The delimiter may be a single character or a string. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + if include and not isinstance(include, list): include = [include] @@ -723,24 +930,58 @@ def walk_blobs( command, prefix=name_starts_with, results_per_page=results_per_page, + container=self.container_name, delimiter=delimiter) + @distributed_trace + def find_blobs_by_tags( + self, filter_expression: str, + **kwargs: Any + ) -> ItemPaged[FilteredBlob]: + """Returns a generator to list the blobs under the specified container whose tags + match the given search expression. + The generator will lazily follow the continuation tokens returned by + the service. + + :param str filter_expression: + The expression to find blobs whose tags matches the specified condition. + eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" + :keyword int results_per_page: + The max result per page when paginating. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. 
For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: An iterable (auto-paging) response of FilteredBlob. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] + """ + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.filter_blobs, + timeout=timeout, + where=filter_expression, + **kwargs) + return ItemPaged( + command, results_per_page=results_per_page, container=self.container_name, + page_iterator_class=FilteredBlobPaged) + @distributed_trace def upload_blob( - self, name, # type: Union[str, BlobProperties] - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> BlobClient + self, name: str, + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, + length: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs + ) -> BlobClient: """Creates a new blob from a data source with automatic chunking. - :param name: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type name: str or ~azure.storage.blob.BlobProperties + :param str name: The blob with which to interact. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -795,9 +1036,12 @@ def upload_blob( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -829,6 +1073,11 @@ def upload_blob( :keyword str encoding: Defaults to UTF-8. + :keyword progress_hook: + A callback to track the progress of a long running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], None] :returns: A BlobClient to interact with the newly uploaded blob. :rtype: ~azure.storage.blob.BlobClient @@ -841,6 +1090,12 @@ def upload_blob( :dedent: 8 :caption: Upload blob to the container. """ + if isinstance(name, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param name is deprecated. 
" + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob = self.get_blob_client(name) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -858,11 +1113,10 @@ def upload_blob( @distributed_trace def delete_blob( - self, blob, # type: Union[str, BlobProperties] - delete_snapshots=None, # type: Optional[str] - **kwargs - ): - # type: (...) -> None + self, blob: str, + delete_snapshots: Optional[str] = None, + **kwargs: Any + ) -> None: """Marks the specified blob or snapshot for deletion. The blob is later deleted during garbage collection. @@ -874,11 +1128,9 @@ def delete_blob( and retains the blob or snapshot for specified number of days. After specified number of days, blob's data is removed from the service during garbage collection. Soft deleted blob or snapshot is accessible through :func:`list_blobs()` specifying `include=["deleted"]` - option. Soft-deleted blob or snapshot can be restored using :func:`~BlobClient.undelete()` + option. Soft-deleted blob or snapshot can be restored using :func:`~azure.storage.blob.BlobClient.undelete()` - :param blob: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The blob with which to interact. :param str delete_snapshots: Required if the blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. @@ -888,6 +1140,7 @@ def delete_blob( value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -918,9 +1171,19 @@ def delete_blob( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None """ + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -929,22 +1192,56 @@ def delete_blob( timeout=timeout, **kwargs) + @overload + def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... + @distributed_trace - def download_blob(self, blob, offset=None, length=None, **kwargs): - # type: (Union[str, BlobProperties], Optional[int], Optional[int], **Any) -> StorageStreamDownloader + def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must be used to read all the content or readinto() must be used to download the blob into - a stream. + a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. - :param blob: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The blob with which to interact. :param int offset: Start of byte range to use for downloading a section of the blob. Must be set if length is provided. :param int length: Number of bytes to read from the stream. This is optional, but should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + + This keyword argument was introduced in API version '2019-12-12'. + :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage service checks the hash of the content that has arrived with the hash @@ -991,136 +1288,42 @@ def download_blob(self, blob, offset=None, length=None, **kwargs): The number of parallel connections with which to download. :keyword str encoding: Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + A callback to track the progress of a long running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], None] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. - multiple calls to the Azure service and the timeout will apply to - each call individually. :returns: A streaming object (StorageStreamDownloader) :rtype: ~azure.storage.blob.StorageStreamDownloader """ + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) - return blob_client.download_blob(offset=offset, length=length, **kwargs) - - def _generate_delete_blobs_subrequest_options( - self, snapshot=None, - delete_snapshots=None, - lease_access_conditions=None, - modified_access_conditions=None, - **kwargs - ): - """This code is a copy from _generated. - - Once Autorest is able to provide request preparation this code should be removed.
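
The overload pair above means the downloader's element type follows the encoding argument: omit it for StorageStreamDownloader[bytes], pass a codec name for StorageStreamDownloader[str]. Below is a minimal round-trip sketch through the public azure.storage.blob package, which this vendored copy mirrors; the connection string, container, and blob names are placeholders:

    from azure.storage.blob import ContainerClient

    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="checkpoints")

    # upload_blob chunks the payload automatically and returns a BlobClient.
    container.upload_blob("offsets/partition-0", b"42", overwrite=True)

    # With encoding left as None the overloads resolve to
    # StorageStreamDownloader[bytes]; passing an encoding yields str.
    raw: bytes = container.download_blob("offsets/partition-0").readall()
    text: str = container.download_blob("offsets/partition-0", encoding="utf-8").readall()

    # delete_blob marks the blob (and, here, its snapshots) for garbage collection.
    container.delete_blob("offsets/partition-0", delete_snapshots="include")
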
- """ - lease_id = None - if lease_access_conditions is not None: - lease_id = lease_access_conditions.lease_id - if_modified_since = None - if modified_access_conditions is not None: - if_modified_since = modified_access_conditions.if_modified_since - if_unmodified_since = None - if modified_access_conditions is not None: - if_unmodified_since = modified_access_conditions.if_unmodified_since - if_match = None - if modified_access_conditions is not None: - if_match = modified_access_conditions.if_match - if_none_match = None - if modified_access_conditions is not None: - if_none_match = modified_access_conditions.if_none_match - if_tags = None - if modified_access_conditions is not None: - if_tags = modified_access_conditions.if_tags - - # Construct parameters - timeout = kwargs.pop('timeout', None) - query_parameters = {} - if snapshot is not None: - query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access - if timeout is not None: - query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access - - # Construct headers - header_parameters = {} - if delete_snapshots is not None: - header_parameters['x-ms-delete-snapshots'] = self._client._serialize.header( # pylint: disable=protected-access - "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType') - if lease_id is not None: - header_parameters['x-ms-lease-id'] = self._client._serialize.header( # pylint: disable=protected-access - "lease_id", lease_id, 'str') - if if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._client._serialize.header( # pylint: disable=protected-access - "if_modified_since", if_modified_since, 'rfc-1123') - if if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._client._serialize.header( # pylint: disable=protected-access - "if_unmodified_since", if_unmodified_since, 'rfc-1123') - if if_match is not None: - header_parameters['If-Match'] = self._client._serialize.header( # pylint: disable=protected-access - "if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._client._serialize.header( # pylint: disable=protected-access - "if_none_match", if_none_match, 'str') - if if_tags is not None: - header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access - - return query_parameters, header_parameters - - def _generate_delete_blobs_options(self, - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - timeout = kwargs.pop('timeout', None) - raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) - delete_snapshots = kwargs.pop('delete_snapshots', None) - if_modified_since = kwargs.pop('if_modified_since', None) - if_unmodified_since = kwargs.pop('if_unmodified_since', None) - if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': self._query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "" - }) - - reqs = [] - for blob in blobs: - blob_name = _get_blob_name(blob) - container_name = self.container_name - - try: - options = BlobClient._generic_delete_blob_options( # pylint: disable=protected-access - snapshot=blob.get('snapshot'), - delete_snapshots=delete_snapshots or blob.get('delete_snapshots'), - lease=blob.get('lease_id'), - 
if_modified_since=if_modified_since or blob.get('if_modified_since'), - if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'), - etag=blob.get('etag'), - if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'), - match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag') - else None, - timeout=blob.get('timeout'), - ) - except AttributeError: - options = BlobClient._generic_delete_blob_options( # pylint: disable=protected-access - delete_snapshots=delete_snapshots, - if_modified_since=if_modified_since, - if_unmodified_since=if_unmodified_since, - if_tags_match_condition=if_tags_match_condition - ) - - query_parameters, header_parameters = self._generate_delete_blobs_subrequest_options(**options) - - req = HttpRequest( - "DELETE", - "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str), - headers=header_parameters - ) - req.format_parameters(query_parameters) - reqs.append(req) - - return reqs, kwargs + return blob_client.download_blob( + offset=offset, + length=length, + encoding=encoding, + **kwargs) @distributed_trace - def delete_blobs(self, *blobs, **kwargs): - # type: (...) -> Iterator[HttpResponse] + def delete_blobs( # pylint: disable=delete-operation-wrong-return-type + self, *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> Iterator["HttpResponse"]: """Marks the specified blobs or snapshots for deletion. The blobs are later deleted during garbage collection. @@ -1131,7 +1334,9 @@ def delete_blobs(self, *blobs, **kwargs): and retains the blobs or snapshots for specified number of days. After specified number of days, blobs' data is removed from the service during garbage collection. Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` - Soft-deleted blobs or snapshots can be restored using :func:`~BlobClient.undelete()` + Soft-deleted blobs or snapshots can be restored using :func:`~azure.storage.blob.BlobClient.undelete()` + + The maximum number of blobs that can be deleted in a single request is 256. :param blobs: The blobs to delete. This can be a single blob, or multiple values can @@ -1144,7 +1349,9 @@ def delete_blobs(self, *blobs, **kwargs): key: 'name', value type: str snapshot you want to delete: key: 'snapshot', value type: str - whether to delete snapthots when deleting blob: + version id: + key: 'version_id', value type: str + whether to delete snapshots when deleting blob: key: 'delete_snapshots', value: 'include' or 'only' if the blob modified or not: key: 'if_modified_since', 'if_unmodified_since', value type: datetime @@ -1159,7 +1366,7 @@ def delete_blobs(self, *blobs, **kwargs): timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: Union[str, Dict[str, Any], BlobProperties] :keyword str delete_snapshots: Required if a blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. @@ -1186,7 +1393,11 @@ def delete_blobs(self, *blobs, **kwargs): This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
+ This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :return: An iterator of responses, one for each blob in order :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] @@ -1200,111 +1411,33 @@ def delete_blobs(self, *blobs, **kwargs): :caption: Deleting multiple blobs. """ if len(blobs) == 0: - return iter(list()) - - reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs) + return iter([]) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + + reqs, options = _generate_delete_blobs_options( + self._query_str, + self.container_name, + self._client, + *blobs, + **kwargs + ) return self._batch_send(*reqs, **options) - def _generate_set_tiers_subrequest_options( - self, tier, snapshot=None, version_id=None, rehydrate_priority=None, lease_access_conditions=None, **kwargs - ): - """This code is a copy from _generated. - - Once Autorest is able to provide request preparation this code should be removed. - """ - if not tier: - raise ValueError("A blob tier must be specified") - if snapshot and version_id: - raise ValueError("Snapshot and version_id cannot be set at the same time") - if_tags = kwargs.pop('if_tags', None) - - lease_id = None - if lease_access_conditions is not None: - lease_id = lease_access_conditions.lease_id - - comp = "tier" - timeout = kwargs.pop('timeout', None) - # Construct parameters - query_parameters = {} - if snapshot is not None: - query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access - if version_id is not None: - query_parameters['versionid'] = self._client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access - if timeout is not None: - query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access - query_parameters['comp'] = self._client._serialize.query("comp", comp, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call - - # Construct headers - header_parameters = {} - header_parameters['x-ms-access-tier'] = self._client._serialize.header("tier", tier, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._client._serialize.header( # pylint: disable=protected-access - "rehydrate_priority", rehydrate_priority, 'str') - if lease_id is not None: - header_parameters['x-ms-lease-id'] = self._client._serialize.header("lease_id", lease_id, 'str') # pylint: disable=protected-access - if if_tags is not None: - header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access - - return query_parameters, header_parameters - - def _generate_set_tiers_options(self, - blob_tier, # type: Optional[Union[str, StandardBlobTier, PremiumPageBlobTier]] - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - timeout = kwargs.pop('timeout', None) - raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) - rehydrate_priority = kwargs.pop('rehydrate_priority', None) - if_tags = kwargs.pop('if_tags_match_condition', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': self._query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "" - }) - - reqs = [] - for blob in blobs: - blob_name = _get_blob_name(blob) - container_name = self.container_name 
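
Because delete_blobs packs its sub-requests into one multipart batch (capped at 256 entries), the dict form lets an individual blob override the batch-wide keyword arguments. A hedged usage sketch against the public package; all names and values are placeholders:

    from azure.storage.blob import ContainerClient

    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="checkpoints")

    responses = container.delete_blobs(
        "ownership/partition-0",               # plain name: batch-wide options apply
        {"name": "ownership/partition-1",      # dict form: per-blob overrides
         "delete_snapshots": "include",
         "timeout": 10},
        raise_on_any_failure=False,            # collect per-blob outcomes instead of raising
    )
    for response in responses:
        print(response.status_code)            # 202 per deleted blob; errors surface inline
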
- - try: - tier = blob_tier or blob.get('blob_tier') - query_parameters, header_parameters = self._generate_set_tiers_subrequest_options( - tier=tier, - snapshot=blob.get('snapshot'), - version_id=blob.get('version_id'), - rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'), - lease_access_conditions=blob.get('lease_id'), - if_tags=if_tags or blob.get('if_tags_match_condition'), - timeout=timeout or blob.get('timeout') - ) - except AttributeError: - query_parameters, header_parameters = self._generate_set_tiers_subrequest_options( - blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags) - - req = HttpRequest( - "PUT", - "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str), - headers=header_parameters - ) - req.format_parameters(query_parameters) - reqs.append(req) - - return reqs, kwargs - @distributed_trace def set_standard_blob_tier_blobs( - self, - standard_blob_tier, # type: Optional[Union[str, StandardBlobTier]] - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - # type: (...) -> Iterator[HttpResponse] + self, standard_blob_tier: Optional[Union[str, "StandardBlobTier"]], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> Iterator["HttpResponse"]: """This operation sets the tier on block blobs. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. + The maximum number of blobs that can be updated in a single request is 256. + :param standard_blob_tier: Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool', 'Archive'. The hot tier is optimized for storing data that is accessed @@ -1334,7 +1467,7 @@ def set_standard_blob_tier_blobs( lease: key: 'lease_id', value type: Union[str, LeaseClient] snapshot: - key: "snapshost", value type: str + key: "snapshot", value type: str version id: key: "version_id", value type: str tags match condition: @@ -1342,7 +1475,7 @@ def set_standard_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob :keyword str if_tags_match_condition: @@ -1352,27 +1485,39 @@ def set_standard_blob_tier_blobs( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. 
:return: An iterator of responses, one for each blob in order :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] """ - reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + standard_blob_tier, + self._client, + *blobs, + **kwargs) return self._batch_send(*reqs, **options) @distributed_trace def set_premium_page_blob_tier_blobs( - self, - premium_page_blob_tier, # type: Optional[Union[str, PremiumPageBlobTier]] - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - # type: (...) -> Iterator[HttpResponse] + self, premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> Iterator["HttpResponse"]: """Sets the page blob tiers on all blobs. This API is only supported for page blobs on premium accounts. + The maximum number of blobs that can be updated in a single request is 256. + :param premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1399,36 +1544,48 @@ def set_premium_page_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. :return: An iterator of responses, one for each blob in order - :rtype: iterator[~azure.core.pipeline.transport.HttpResponse] + :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] """ - reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + premium_page_blob_tier, + self._client, + *blobs, + **kwargs) return self._batch_send(*reqs, **options) def get_blob_client( - self, blob, # type: Union[str, BlobProperties] - snapshot=None # type: str - ): - # type: (...) -> BlobClient + self, blob: str, + snapshot: Optional[str] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. - :param blob: + :param str blob: The blob with which to interact. - :type blob: str or ~azure.storage.blob.BlobProperties :param str snapshot: The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`~BlobClient.create_snapshot()`. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. 
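
A short sketch tying the batch-tier methods and the new version_id keyword together, assuming the public package at a version matching this vendored copy; tier values, blob names, and the version id are placeholders:

    from azure.storage.blob import ContainerClient

    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="archive")

    # With the batch-wide tier left as None, each dict entry must carry its
    # own 'blob_tier'; one multipart request retiers everything listed.
    container.set_standard_blob_tier_blobs(
        None,
        {"name": "logs/2024-01.json", "blob_tier": "Cool"},
        {"name": "logs/2024-02.json", "blob_tier": "Archive"},
    )

    # get_blob_client now takes a keyword-only version_id, so every call on
    # the returned client is pinned to that blob version.
    pinned = container.get_blob_client("logs/2024-01.json", version_id="<version-id>")
    properties = pinned.get_blob_properties()
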
:returns: A BlobClient. :rtype: ~azure.storage.blob.BlobClient @@ -1441,7 +1598,15 @@ def get_blob_client( :dedent: 8 :caption: Get the blob client. """ - blob_name = _get_blob_name(blob) + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) + blob_name = blob.get('name') + else: + blob_name = blob _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access policies=self._pipeline._impl_policies # pylint: disable = protected-access @@ -1450,5 +1615,6 @@ def get_blob_client( self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client_helpers.py new file mode 100644 index 000000000000..82edd48dffb8 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_container_client_helpers.py @@ -0,0 +1,266 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union +from urllib.parse import quote, urlparse + +from azure.core import MatchConditions +from azure.core.pipeline.transport import HttpRequest +from ._blob_client_helpers import _generic_delete_blob_options +from ._generated import AzureBlobStorage +from ._models import BlobProperties +from ._shared.base_client import parse_query + +if TYPE_CHECKING: + from azure.storage.blob import RehydratePriority + from urllib.parse import ParseResult + from ._generated.models import LeaseAccessConditions, ModifiedAccessConditions + from ._models import PremiumPageBlobTier, StandardBlobTier + + +def _parse_url(account_url: str, container_name: str) -> Tuple["ParseResult", Any]: + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError as exc: + raise ValueError("Container URL must be a string.") from exc + parsed_url = urlparse(account_url.rstrip('/')) + if not container_name: + raise ValueError("Please specify a container name.") + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {account_url}") + + _, sas_token = parse_query(parsed_url.query) + + return parsed_url, sas_token + +def _format_url(container_name: Union[bytes, str], hostname: str, scheme: str, query_str: str) -> str: + if isinstance(container_name, str): + container_name = container_name.encode('UTF-8') + return f"{scheme}://{hostname}/{quote(container_name)}{query_str}" + +# This code is a copy from _generated. +# Once Autorest is able to provide request preparation this code should be removed. +def _generate_delete_blobs_subrequest_options( + client: AzureBlobStorage, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + delete_snapshots: Optional[str] = None, + lease_access_conditions: Optional["LeaseAccessConditions"] = None, + modified_access_conditions: Optional["ModifiedAccessConditions"] = None, + **kwargs +) -> Tuple[Dict[str, Any], Dict[str, Any]]: + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + if_modified_since = None + if modified_access_conditions is not None: + if_modified_since = modified_access_conditions.if_modified_since + if_unmodified_since = None + if modified_access_conditions is not None: + if_unmodified_since = modified_access_conditions.if_unmodified_since + if_match = None + if modified_access_conditions is not None: + if_match = modified_access_conditions.if_match + if_none_match = None + if modified_access_conditions is not None: + if_none_match = modified_access_conditions.if_none_match + if_tags = None + if modified_access_conditions is not None: + if_tags = modified_access_conditions.if_tags + + # Construct parameters + timeout = kwargs.pop('timeout', None) + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if version_id is not None: + query_parameters['versionid'] = client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + + # Construct headers + header_parameters = {} + if delete_snapshots is not None: + header_parameters['x-ms-delete-snapshots'] = client._serialize.header( # pylint: 
disable=protected-access + "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = client._serialize.header( # pylint: disable=protected-access + "lease_id", lease_id, 'str') + if if_modified_since is not None: + header_parameters['If-Modified-Since'] = client._serialize.header( # pylint: disable=protected-access + "if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = client._serialize.header( # pylint: disable=protected-access + "if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + header_parameters['If-Match'] = client._serialize.header( # pylint: disable=protected-access + "if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = client._serialize.header( # pylint: disable=protected-access + "if_none_match", if_none_match, 'str') + if if_tags is not None: + header_parameters['x-ms-if-tags'] = client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + +def _generate_delete_blobs_options( + query_str: str, + container_name: str, + client: AzureBlobStorage, + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any +) -> Tuple[List[HttpRequest], Dict[str, Any]]: + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + delete_snapshots = kwargs.pop('delete_snapshots', None) + if_modified_since = kwargs.pop('if_modified_since', None) + if_unmodified_since = kwargs.pop('if_unmodified_since', None) + if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) + url_prepend = kwargs.pop('url_prepend', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + if not isinstance(blob, str): + blob_name = blob.get('name') + options = _generic_delete_blob_options( + snapshot=blob.get('snapshot'), + version_id=blob.get('version_id'), + delete_snapshots=delete_snapshots or blob.get('delete_snapshots'), + lease=blob.get('lease_id'), + if_modified_since=if_modified_since or blob.get('if_modified_since'), + if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'), + etag=blob.get('etag'), + if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'), + match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag') + else None, + timeout=blob.get('timeout'), + ) + else: + blob_name = blob + options = _generic_delete_blob_options( + delete_snapshots=delete_snapshots, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags_match_condition=if_tags_match_condition + ) + + query_parameters, header_parameters = _generate_delete_blobs_subrequest_options(client, **options) + + req = HttpRequest( + "DELETE", + (f"{'/' + quote(url_prepend) if url_prepend else ''}/" + f"{quote(container_name)}/{quote(str(blob_name), safe='/~')}{query_str}"), + headers=header_parameters + ) + + req.format_parameters(query_parameters) + reqs.append(req) + + return reqs, kwargs + +# This code is a copy from _generated. +# Once Autorest is able to provide request preparation this code should be removed. 
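
For orientation, here is a standalone sketch of the sub-request path that _generate_delete_blobs_options assembles; the url_prepend branch mirrors the localhost/emulator case (for example Azurite), where the account name must prefix the path. The account, container, and blob values are illustrative only:

    from urllib.parse import quote

    def delete_subrequest_url(container: str, blob_name: str,
                              sas_query: str = "", url_prepend: str = "") -> str:
        # Mirrors the f-string above: optional account prefix, quoted container,
        # blob name quoted but keeping '/' and '~', then any SAS query string.
        prefix = f"/{quote(url_prepend)}" if url_prepend else ""
        return f"{prefix}/{quote(container)}/{quote(blob_name, safe='/~')}{sas_query}"

    # Against a real account: /checkpoints/ownership/partition-0
    print(delete_subrequest_url("checkpoints", "ownership/partition-0"))
    # Against an emulator:   /devstoreaccount1/checkpoints/ownership/partition-0
    print(delete_subrequest_url("checkpoints", "ownership/partition-0",
                                url_prepend="devstoreaccount1"))
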
+def _generate_set_tiers_subrequest_options( + client: AzureBlobStorage, + tier: Optional[Union["PremiumPageBlobTier", "StandardBlobTier", str]], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + rehydrate_priority: Optional["RehydratePriority"] = None, + lease_access_conditions: Optional["LeaseAccessConditions"] = None, + **kwargs: Any +) -> Tuple[Dict[str, Any], Dict[str, Any]]: + if not tier: + raise ValueError("A blob tier must be specified") + if snapshot and version_id: + raise ValueError("Snapshot and version_id cannot be set at the same time") + if_tags = kwargs.pop('if_tags', None) + + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + + comp = "tier" + timeout = kwargs.pop('timeout', None) + # Construct parameters + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if version_id is not None: + query_parameters['versionid'] = client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + query_parameters['comp'] = client._serialize.query("comp", comp, 'str') # pylint: disable=protected-access + + # Construct headers + header_parameters = {} + header_parameters['x-ms-access-tier'] = client._serialize.header("tier", tier, 'str') # pylint: disable=protected-access + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = client._serialize.header( # pylint: disable=protected-access + "rehydrate_priority", rehydrate_priority, 'str') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = client._serialize.header("lease_id", lease_id, 'str') # pylint: disable=protected-access + if if_tags is not None: + header_parameters['x-ms-if-tags'] = client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + +def _generate_set_tiers_options( + query_str: str, + container_name: str, + blob_tier: Optional[Union["PremiumPageBlobTier", "StandardBlobTier", str]], + client: AzureBlobStorage, + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any +) -> Tuple[List[HttpRequest], Dict[str, Any]]: + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + rehydrate_priority = kwargs.pop('rehydrate_priority', None) + if_tags = kwargs.pop('if_tags_match_condition', None) + url_prepend = kwargs.pop('url_prepend', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + if not isinstance(blob, str): + blob_name = blob.get('name') + tier = blob_tier or blob.get('blob_tier') + query_parameters, header_parameters = _generate_set_tiers_subrequest_options( + client=client, + tier=tier, + snapshot=blob.get('snapshot'), + version_id=blob.get('version_id'), + rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'), + lease_access_conditions=blob.get('lease_id'), + if_tags=if_tags or blob.get('if_tags_match_condition'), + timeout=timeout or blob.get('timeout') + ) + else: + blob_name = blob + query_parameters, header_parameters = 
_generate_set_tiers_subrequest_options( + client, blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags) + + req = HttpRequest( + "PUT", + (f"{'/' + quote(url_prepend) if url_prepend else ''}/" + f"{quote(container_name)}/{quote(str(blob_name), safe='/~')}{query_str}"), + headers=header_parameters + ) + req.format_parameters(query_parameters) + reqs.append(req) + + return reqs, kwargs diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_deserialize.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_deserialize.py index ae65c840f238..b6ee916097a1 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_deserialize.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_deserialize.py @@ -3,24 +3,42 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use -from typing import ( # pylint: disable=unused-import - Tuple, Dict, List, - TYPE_CHECKING -) -from ._models import BlobType, CopyProperties, ContentSettings, LeaseProperties, BlobProperties +from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING +from urllib.parse import unquote +from xml.etree.ElementTree import Element + +from ._models import ( + BlobAnalyticsLogging, + BlobProperties, + BlobType, + ContainerProperties, + ContentSettings, + CopyProperties, + CorsRule, + ImmutabilityPolicy, + LeaseProperties, + Metrics, + ObjectReplicationPolicy, + ObjectReplicationRule, + RetentionPolicy, + StaticWebsite +) from ._shared.models import get_enum_value - from ._shared.response_handlers import deserialize_metadata -from ._models import ContainerProperties, BlobAnalyticsLogging, Metrics, CorsRule, RetentionPolicy, \ - StaticWebsite, ObjectReplicationPolicy, ObjectReplicationRule if TYPE_CHECKING: - from ._generated.models import PageList - + from azure.core.pipeline import PipelineResponse + from ._generated.models import ( + BlobItemInternal, + BlobTags, + PageList, + StorageServiceProperties, + StorageServiceStats, + ) + from ._shared.models import LocationMode -def deserialize_pipeline_response_into_cls(cls_method, response, obj, headers): +def deserialize_pipeline_response_into_cls(cls_method, response: "PipelineResponse", obj: Any, headers: Dict[str, Any]): try: deserialized_response = response.http_response except AttributeError: @@ -28,7 +46,7 @@ def deserialize_pipeline_response_into_cls(cls_method, response, obj, headers): return cls_method(deserialized_response, obj, headers) -def deserialize_blob_properties(response, obj, headers): +def deserialize_blob_properties(response: "PipelineResponse", obj: Any, headers: Dict[str, Any]) -> BlobProperties: blob_properties = BlobProperties( metadata=deserialize_metadata(response, obj, headers), object_replication_source_properties=deserialize_ors_policies(response.http_response.headers), @@ -42,7 +60,7 @@ def deserialize_blob_properties(response, obj, headers): return blob_properties -def deserialize_ors_policies(policy_dictionary): +def deserialize_ors_policies(policy_dictionary: Optional[Dict[str, str]]) -> Optional[List[ObjectReplicationPolicy]]: if policy_dictionary is None: return None @@ -52,7 +70,7 @@ def 
deserialize_ors_policies(policy_dictionary): or_policy_status_headers = {key: val for key, val in policy_dictionary.items() if 'or-' in key and key != 'x-ms-or-policy-id'} - parsed_result = {} + parsed_result: Dict[str, List[ObjectReplicationRule]] = {} for key, val in or_policy_status_headers.items(): # list blobs gives or-policy_rule and get blob properties gives x-ms-or-policy_rule @@ -69,13 +87,21 @@ def deserialize_ors_policies(policy_dictionary): return result_list -def deserialize_blob_stream(response, obj, headers): +def deserialize_blob_stream( + response: "PipelineResponse", + obj: Any, + headers: Dict[str, Any] +) -> Tuple["LocationMode", Any]: blob_properties = deserialize_blob_properties(response, obj, headers) obj.properties = blob_properties return response.http_response.location_mode, obj -def deserialize_container_properties(response, obj, headers): +def deserialize_container_properties( + response: "PipelineResponse", + obj: Any, + headers: Dict[str, Any] +) -> ContainerProperties: metadata = deserialize_metadata(response, obj, headers) container_properties = ContainerProperties( metadata=metadata, @@ -84,65 +110,70 @@ def deserialize_container_properties(response, obj, headers): return container_properties -def get_page_ranges_result(ranges): - # type: (PageList) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] - page_range = [] # type: ignore - clear_range = [] # type: List +def get_page_ranges_result(ranges: "PageList") -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: + page_range = [] + clear_range = [] if ranges.page_range: - page_range = [{'start': b.start, 'end': b.end} for b in ranges.page_range] # type: ignore + page_range = [{'start': b.start, 'end': b.end} for b in ranges.page_range] if ranges.clear_range: clear_range = [{'start': b.start, 'end': b.end} for b in ranges.clear_range] - return page_range, clear_range # type: ignore + return page_range, clear_range -def service_stats_deserialize(generated): - """Deserialize a ServiceStats objects into a dict. - """ +def service_stats_deserialize(generated: "StorageServiceStats") -> Dict[str, Any]: + status = None + last_sync_time = None + if generated.geo_replication is not None: + status = generated.geo_replication.status + last_sync_time = generated.geo_replication.last_sync_time return { 'geo_replication': { - 'status': generated.geo_replication.status, - 'last_sync_time': generated.geo_replication.last_sync_time, + 'status': status, + 'last_sync_time': last_sync_time } } - -def service_properties_deserialize(generated): - """Deserialize a ServiceProperties objects into a dict. 
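
The header grouping that deserialize_ors_policies performs can be restated as a small standalone function; the header names and statuses below are fabricated for illustration:

    from typing import Dict, List, Tuple

    def group_or_headers(headers: Dict[str, str]) -> Dict[str, List[Tuple[str, str]]]:
        policies: Dict[str, List[Tuple[str, str]]] = {}
        for key, value in headers.items():
            # Both 'x-ms-or-<policy>_<rule>' (get properties) and
            # 'or-<policy>_<rule>' (list blobs) spellings occur; the
            # 'x-ms-or-policy-id' header is metadata, not a rule status.
            if 'or-' not in key or key == 'x-ms-or-policy-id':
                continue
            policy_id, rule_id = key[key.index('or-') + 3:].split('_')
            policies.setdefault(policy_id, []).append((rule_id, value))
        return policies

    print(group_or_headers({
        'x-ms-or-8a61fa01_aaaa': 'complete',
        'x-ms-or-8a61fa01_bbbb': 'failed',
    }))
    # {'8a61fa01': [('aaaa', 'complete'), ('bbbb', 'failed')]}
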
- """ +def service_properties_deserialize(generated: "StorageServiceProperties") -> Dict[str, Any]: + cors_list = None + if generated.cors is not None: + cors_list = [CorsRule._from_generated(cors) for cors in generated.cors] # pylint: disable=protected-access return { 'analytics_logging': BlobAnalyticsLogging._from_generated(generated.logging), # pylint: disable=protected-access 'hour_metrics': Metrics._from_generated(generated.hour_metrics), # pylint: disable=protected-access 'minute_metrics': Metrics._from_generated(generated.minute_metrics), # pylint: disable=protected-access - 'cors': [CorsRule._from_generated(cors) for cors in generated.cors], # pylint: disable=protected-access - 'target_version': generated.default_service_version, # pylint: disable=protected-access + 'cors': cors_list, + 'target_version': generated.default_service_version, 'delete_retention_policy': RetentionPolicy._from_generated(generated.delete_retention_policy), # pylint: disable=protected-access 'static_website': StaticWebsite._from_generated(generated.static_website), # pylint: disable=protected-access } -def get_blob_properties_from_generated_code(generated): +def get_blob_properties_from_generated_code(generated: "BlobItemInternal") -> BlobProperties: blob = BlobProperties() - blob.name = generated.name + if generated.name.encoded and generated.name.content is not None: + blob.name = unquote(generated.name.content) + else: + blob.name = generated.name.content #type: ignore blob_type = get_enum_value(generated.properties.blob_type) - blob.blob_type = BlobType(blob_type) if blob_type else None + blob.blob_type = BlobType(blob_type) blob.etag = generated.properties.etag blob.deleted = generated.deleted blob.snapshot = generated.snapshot blob.is_append_blob_sealed = generated.properties.is_sealed - blob.metadata = generated.metadata.additional_properties if generated.metadata else {} + blob.metadata = generated.metadata.additional_properties if generated.metadata else {} # type: ignore [assignment] blob.encrypted_metadata = generated.metadata.encrypted if generated.metadata else None blob.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access blob.copy = CopyProperties._from_generated(generated) # pylint: disable=protected-access blob.last_modified = generated.properties.last_modified - blob.creation_time = generated.properties.creation_time + blob.creation_time = generated.properties.creation_time # type: ignore [assignment] blob.content_settings = ContentSettings._from_generated(generated) # pylint: disable=protected-access - blob.size = generated.properties.content_length + blob.size = generated.properties.content_length # type: ignore [assignment] blob.page_blob_sequence_number = generated.properties.blob_sequence_number - blob.server_encrypted = generated.properties.server_encrypted + blob.server_encrypted = generated.properties.server_encrypted # type: ignore [assignment] blob.encryption_scope = generated.properties.encryption_scope blob.deleted_time = generated.properties.deleted_time blob.remaining_retention_days = generated.properties.remaining_retention_days - blob.blob_tier = generated.properties.access_tier + blob.blob_tier = generated.properties.access_tier # type: ignore [assignment] blob.rehydrate_priority = generated.properties.rehydrate_priority blob.blob_tier_inferred = generated.properties.access_tier_inferred blob.archive_status = generated.properties.archive_status @@ -150,17 +181,54 @@ def get_blob_properties_from_generated_code(generated): blob.version_id = 
generated.version_id blob.is_current_version = generated.is_current_version blob.tag_count = generated.properties.tag_count - blob.tags = parse_tags(generated.blob_tags) # pylint: disable=protected-access + blob.tags = parse_tags(generated.blob_tags) blob.object_replication_source_properties = deserialize_ors_policies(generated.object_replication_metadata) blob.last_accessed_on = generated.properties.last_accessed_on + blob.immutability_policy = ImmutabilityPolicy._from_generated(generated) # pylint: disable=protected-access + blob.has_legal_hold = generated.properties.legal_hold + blob.has_versions_only = generated.has_versions_only return blob - -def parse_tags(generated_tags): - # type: (Optional[List[BlobTag]]) -> Union[Dict[str, str], None] +def parse_tags(generated_tags: Optional["BlobTags"]) -> Optional[Dict[str, str]]: """Deserialize a list of BlobTag objects into a dict. + + :param Optional[BlobTags] generated_tags: + A list containing the BlobTag objects from generated code. + :returns: A dictionary of the BlobTag objects. + :rtype: Optional[Dict[str, str]] """ if generated_tags: tag_dict = {t.key: t.value for t in generated_tags.blob_tag_set} return tag_dict return None + + +def load_single_xml_node(element: Element, name: str) -> Optional[Element]: + return element.find(name) + + +def load_many_xml_nodes( + element: Element, + name: str, + wrapper: Optional[str] = None +) -> List[Optional[Element]]: + found_element: Optional[Element] = element + if wrapper: + found_element = load_single_xml_node(element, wrapper) + if found_element is None: + return [] + return list(found_element.findall(name)) + + +def load_xml_string(element: Element, name: str) -> Optional[str]: + node = element.find(name) + if node is None or not node.text: + return None + return node.text + + +def load_xml_int(element: Element, name: str) -> Optional[int]: + node = element.find(name) + if node is None or not node.text: + return None + return int(node.text) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_download.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_download.py index 46e59e5d2492..8b8f428f845b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_download.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_download.py @@ -3,54 +3,72 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
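
A quick, self-contained check of how the XML helpers added above behave, assuming those definitions are in scope; the XML payload is a contrived stand-in for a listing response:

    from xml.etree.ElementTree import fromstring

    doc = fromstring(
        "<EnumerationResults>"
        "<MaxResults>100</MaxResults>"
        "<Blobs><Blob><Name>a</Name></Blob><Blob><Name>b</Name></Blob></Blobs>"
        "</EnumerationResults>"
    )
    assert load_xml_int(doc, 'MaxResults') == 100
    assert load_xml_string(doc, 'NextMarker') is None                  # absent node -> None
    assert len(load_many_xml_nodes(doc, 'Blob', wrapper='Blobs')) == 2
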
# -------------------------------------------------------------------------- - +import codecs import sys import threading +import time import warnings -from io import BytesIO - -from azure.core.exceptions import HttpResponseError +from io import BytesIO, StringIO +from typing import ( + Any, Callable, cast, Dict, Generator, + Generic, IO, Iterator, List, Optional, + overload, Tuple, TypeVar, Union, TYPE_CHECKING +) + +from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError from azure.core.tracing.common import with_current_context -from ._shared.encryption import decrypt_blob -from ._shared.request_handlers import validate_and_format_range_headers -from ._shared.response_handlers import process_storage_error, parse_length_from_content_range -from ._deserialize import get_page_ranges_result - -def process_range_and_offset(start_range, end_range, length, encryption): +from ._shared.request_handlers import validate_and_format_range_headers +from ._shared.response_handlers import parse_length_from_content_range, process_storage_error +from ._deserialize import deserialize_blob_properties, get_page_ranges_result +from ._encryption import ( + adjust_blob_size_for_encryption, + decrypt_blob, + get_adjusted_download_range_and_offset, + is_encryption_v2, + parse_encryption_data +) + +if TYPE_CHECKING: + from codecs import IncrementalDecoder + from ._encryption import _EncryptionData + from ._generated import AzureBlobStorage + from ._generated.operations import BlobOperations + from ._models import BlobProperties + from ._shared.models import StorageConfiguration + + +T = TypeVar('T', bytes, str) + + +def process_range_and_offset( + start_range: int, + end_range: int, + length: Optional[int], + encryption_options: Dict[str, Any], + encryption_data: Optional["_EncryptionData"] +) -> Tuple[Tuple[int, int], Tuple[int, int]]: start_offset, end_offset = 0, 0 - if encryption.get("key") is not None or encryption.get("resolver") is not None: - if start_range is not None: - # Align the start of the range along a 16 byte block - start_offset = start_range % 16 - start_range -= start_offset - - # Include an extra 16 bytes for the IV if necessary - # Because of the previous offsetting, start_range will always - # be a multiple of 16. 
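
The block being removed here performed the AES-CBC range alignment inline; that arithmetic now lives in get_adjusted_download_range_and_offset. For reference, a faithful restatement of the v1 math as a standalone function: ranges are aligned to 16-byte cipher blocks, with one extra leading block reserved for the IV whenever the range does not start at the beginning of the blob.

    from typing import Optional, Tuple

    def adjust_range_v1(start: int, end: int,
                        length: Optional[int]) -> Tuple[Tuple[int, int], Tuple[int, int]]:
        start_offset = start % 16        # align the start down to a block boundary
        start -= start_offset
        if start > 0:                    # reserve the preceding block for the IV
            start_offset += 16
            start -= 16
        end_offset = 0
        if length is not None:           # align the end up to a block boundary
            end_offset = 15 - (end % 16)
            end += end_offset
        return (start, end), (start_offset, end_offset)

    # Requesting bytes 20-35 fetches blocks 0-47 and discards 20 leading bytes.
    print(adjust_range_v1(20, 35, 16))   # ((0, 47), (20, 12))
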
- if start_range > 0: - start_offset += 16 - start_range -= 16 - - if length is not None: - # Align the end of the range along a 16 byte block - end_offset = 15 - (end_range % 16) - end_range += end_offset + if encryption_options.get("key") is not None or encryption_options.get("resolver") is not None: + return get_adjusted_download_range_and_offset( + start_range, + end_range, + length, + encryption_data) return (start_range, end_range), (start_offset, end_offset) -def process_content(data, start_offset, end_offset, encryption): +def process_content(data: Any, start_offset: int, end_offset: int, encryption: Dict[str, Any]) -> bytes: if data is None: raise ValueError("Response cannot be None.") - try: - content = b"".join(list(data)) - except Exception as error: - raise HttpResponseError(message="Download stream interrupted.", response=data.response, error=error) + + content = b"".join(list(data)) + if content and encryption.get("key") is not None or encryption.get("resolver") is not None: try: return decrypt_blob( - encryption.get("required"), + encryption.get("required") or False, encryption.get("key"), encryption.get("resolver"), content, @@ -59,26 +77,28 @@ def process_content(data, start_offset, end_offset, encryption): data.response.headers, ) except Exception as error: - raise HttpResponseError(message="Decryption failed.", response=data.response, error=error) + raise HttpResponseError(message="Decryption failed.", response=data.response, error=error) from error return content class _ChunkDownloader(object): # pylint: disable=too-many-instance-attributes def __init__( self, - client=None, - non_empty_ranges=None, - total_size=None, - chunk_size=None, - current_progress=None, - start_range=None, - end_range=None, - stream=None, - parallel=None, - validate_content=None, - encryption_options=None, - **kwargs - ): + client: "BlobOperations", + total_size: int, + chunk_size: int, + current_progress: int, + start_range: int, + end_range: int, + validate_content: bool, + encryption_options: Dict[str, Any], + encryption_data: Optional["_EncryptionData"] = None, + stream: Any = None, + parallel: Optional[int] = None, + non_empty_ranges: Optional[List[Dict[str, Any]]] = None, + progress_hook: Optional[Callable[[int, Optional[int]], None]] = None, + **kwargs: Any + ) -> None: self.client = client self.non_empty_ranges = non_empty_ranges @@ -92,54 +112,59 @@ def __init__( self.stream = stream self.stream_lock = threading.Lock() if parallel else None self.progress_lock = threading.Lock() if parallel else None + self.progress_hook = progress_hook # For a parallel download, the stream is always seekable, so we note down the current position # in order to seek to the right place when out-of-order chunks come in - self.stream_start = stream.tell() if parallel else None + self.stream_start = stream.tell() if parallel else 0 # Download progress so far self.progress_total = current_progress # Encryption self.encryption_options = encryption_options + self.encryption_data = encryption_data # Parameters for each get operation self.validate_content = validate_content self.request_options = kwargs - def _calculate_range(self, chunk_start): + def _calculate_range(self, chunk_start: int) -> Tuple[int, int]: if chunk_start + self.chunk_size > self.end_index: chunk_end = self.end_index else: chunk_end = chunk_start + self.chunk_size return chunk_start, chunk_end - def get_chunk_offsets(self): + def get_chunk_offsets(self) -> Generator[int, None, None]: index = self.start_index while index < self.end_index: 
yield index index += self.chunk_size - def process_chunk(self, chunk_start): + def process_chunk(self, chunk_start: int) -> None: chunk_start, chunk_end = self._calculate_range(chunk_start) - chunk_data = self._download_chunk(chunk_start, chunk_end - 1) + chunk_data, _ = self._download_chunk(chunk_start, chunk_end - 1) length = chunk_end - chunk_start if length > 0: self._write_to_stream(chunk_data, chunk_start) self._update_progress(length) - def yield_chunk(self, chunk_start): + def yield_chunk(self, chunk_start: int) -> Tuple[bytes, int]: chunk_start, chunk_end = self._calculate_range(chunk_start) return self._download_chunk(chunk_start, chunk_end - 1) - def _update_progress(self, length): + def _update_progress(self, length: int) -> None: if self.progress_lock: with self.progress_lock: # pylint: disable=not-context-manager self.progress_total += length else: self.progress_total += length - def _write_to_stream(self, chunk_data, chunk_start): + if self.progress_hook: + self.progress_hook(self.progress_total, self.total_size) + + def _write_to_stream(self, chunk_data: bytes, chunk_start: int) -> None: if self.stream_lock: with self.stream_lock: # pylint: disable=not-context-manager self.stream.seek(self.stream_start + (chunk_start - self.start_index)) @@ -147,7 +172,7 @@ def _write_to_stream(self, chunk_data, chunk_start): else: self.stream.write(chunk_data) - def _do_optimize(self, given_range_start, given_range_end): + def _do_optimize(self, given_range_start: int, given_range_end: int) -> bool: # If we have no page range list stored, then assume there's data everywhere for that page blob # or it's a block blob or append blob if self.non_empty_ranges is None: @@ -172,15 +197,18 @@ def _do_optimize(self, given_range_start, given_range_end): # Went through all src_ranges, but nothing overlapped. Optimization will be applied. return True - def _download_chunk(self, chunk_start, chunk_end): + def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes, int]: + if self.encryption_options is None: + raise ValueError("Required argument is missing: encryption_options") download_range, offset = process_range_and_offset( - chunk_start, chunk_end, chunk_end, self.encryption_options + chunk_start, chunk_end, chunk_end, self.encryption_options, self.encryption_data ) # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. # Do optimize and create empty chunk locally if condition is met. 
if self._do_optimize(download_range[0], download_range[1]): - chunk_data = b"\x00" * self.chunk_size + content_length = download_range[1] - download_range[0] + 1 + chunk_data = b"\x00" * content_length else: range_header, range_validation = validate_and_format_range_headers( download_range[0], @@ -188,98 +216,130 @@ def _download_chunk(self, chunk_start, chunk_end): check_content_md5=self.validate_content ) - try: - _, response = self.client.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self.validate_content, - data_stream_total=self.total_size, - download_stream_current=self.progress_total, - **self.request_options - ) - except HttpResponseError as error: - process_storage_error(error) + retry_active = True + retry_total = 3 + while retry_active: + response: Any = None + try: + _, response = self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + ) + except HttpResponseError as error: + process_storage_error(error) - chunk_data = process_content(response, offset[0], offset[1], self.encryption_options) + try: + chunk_data = process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + time.sleep(1) + content_length = response.content_length # This makes sure that if_match is set so that we can validate # that subsequent downloads are to an unmodified blob if self.request_options.get("modified_access_conditions"): self.request_options["modified_access_conditions"].if_match = response.properties.etag - return chunk_data + return chunk_data, content_length class _ChunkIterator(object): - """Async iterator for chunks in blob download stream.""" + """Iterator for chunks in blob download stream.""" - def __init__(self, size, content, downloader): + def __init__(self, size: int, content: bytes, downloader: Optional[_ChunkDownloader], chunk_size: int) -> None: self.size = size + self._chunk_size = chunk_size self._current_content = content self._iter_downloader = downloader - self._iter_chunks = None - self._complete = (size == 0) + self._iter_chunks: Optional[Generator[int, None, None]] = None + self._complete = size == 0 - def __len__(self): + def __len__(self) -> int: return self.size - def __iter__(self): + def __iter__(self) -> Iterator[bytes]: return self - def __next__(self): - """Iterate through responses.""" + # Iterate through responses. 
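
The chunk download above now wraps body processing in a bounded retry (three attempts with a one-second pause) instead of failing on the first transient short-read or decode error. A generic sketch of that pattern; fetch_chunk and the caught exception types are stand-ins for the service call and for IncompleteReadError/DecodeError:

    import time
    from typing import Callable

    def read_with_retries(fetch_chunk: Callable[[], bytes], attempts: int = 3) -> bytes:
        while True:
            try:
                return fetch_chunk()
            except (ConnectionError, ValueError):
                attempts -= 1
                if attempts <= 0:
                    raise                # budget spent: surface the error
                time.sleep(1)            # brief pause before re-requesting the range
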
+ def __next__(self) -> bytes: if self._complete: raise StopIteration("Download complete") if not self._iter_downloader: - # If no iterator was supplied, the download completed with - # the initial GET, so we just return that data + # cut the data obtained from initial GET into chunks + if len(self._current_content) > self._chunk_size: + return self._get_chunk_data() self._complete = True return self._current_content if not self._iter_chunks: self._iter_chunks = self._iter_downloader.get_chunk_offsets() - else: - chunk = next(self._iter_chunks) - self._current_content = self._iter_downloader.yield_chunk(chunk) - return self._current_content + # initial GET result still has more than _chunk_size bytes of data + if len(self._current_content) >= self._chunk_size: + return self._get_chunk_data() + + try: + next_chunk = next(self._iter_chunks) + self._current_content += self._iter_downloader.yield_chunk(next_chunk)[0] + except StopIteration as e: + self._complete = True + if self._current_content: + return self._current_content + raise e + + # the current content from the first get is still there but smaller than chunk size + # therefore we want to make sure its also included + return self._get_chunk_data() next = __next__ # Python 2 compatibility. + def _get_chunk_data(self) -> bytes: + chunk_data = self._current_content[: self._chunk_size] + self._current_content = self._current_content[self._chunk_size:] + return chunk_data -class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes - """A streaming object to download from Azure Storage. - :ivar str name: - The name of the blob being downloaded. - :ivar str container: - The name of the container where the blob is. - :ivar ~azure.storage.blob.BlobProperties properties: - The properties of the blob being downloaded. If only a range of the data is being - downloaded, this will be reflected in the properties. - :ivar int size: - The size of the total data in the stream. This will be the byte range if specified, - otherwise the total size of the blob. +class StorageStreamDownloader(Generic[T]): # pylint: disable=too-many-instance-attributes + """ + A streaming object to download from Azure Storage. """ + name: str + """The name of the blob being downloaded.""" + container: str + """The name of the container where the blob is.""" + properties: "BlobProperties" + """The properties of the blob being downloaded. If only a range of the data is being + downloaded, this will be reflected in the properties.""" + size: int + """The size of the total data in the stream. 
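
With the reworked iterator, chunks() re-slices the buffered initial GET so consumers see uniformly sized pieces (the configured chunk size), except possibly the last. A usage sketch via the public package, with placeholder names:

    from azure.storage.blob import ContainerClient

    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="checkpoints")

    downloader = container.download_blob("offsets/partition-0")
    with open("partition-0.bin", "wb") as handle:
        for chunk in downloader.chunks():   # Iterator[bytes]
            handle.write(chunk)
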
This will be the byte range if specified, + otherwise the total size of the blob.""" + def __init__( self, - clients=None, - config=None, - start_range=None, - end_range=None, - validate_content=None, - encryption_options=None, - max_concurrency=1, - name=None, - container=None, - encoding=None, - **kwargs - ): + clients: "AzureBlobStorage" = None, # type: ignore [assignment] + config: "StorageConfiguration" = None, # type: ignore [assignment] + start_range: Optional[int] = None, + end_range: Optional[int] = None, + validate_content: bool = None, # type: ignore [assignment] + encryption_options: Dict[str, Any] = None, # type: ignore [assignment] + max_concurrency: int = 1, + name: str = None, # type: ignore [assignment] + container: str = None, # type: ignore [assignment] + encoding: Optional[str] = None, + download_cls: Optional[Callable] = None, + **kwargs: Any + ) -> None: self.name = name self.container = container - self.properties = None - self.size = None + self.size = 0 self._clients = clients self._config = config @@ -289,64 +349,97 @@ def __init__( self._encoding = encoding self._validate_content = validate_content self._encryption_options = encryption_options or {} + self._progress_hook = kwargs.pop('progress_hook', None) self._request_options = kwargs + self._response = None self._location_mode = None - self._download_complete = False - self._current_content = None - self._file_size = None + self._current_content: Union[str, bytes] = b'' + self._file_size = 0 self._non_empty_ranges = None - self._response = None + self._encryption_data: Optional["_EncryptionData"] = None + + # The content download offset, after any processing (decryption), in bytes + self._download_offset = 0 + # The raw download offset, before processing (decryption), in bytes + self._raw_download_offset = 0 + # The offset the stream has been read to in bytes or chars depending on mode + self._read_offset = 0 + # The offset into current_content that has been consumed in bytes or chars depending on mode + self._current_content_offset = 0 + + self._text_mode: Optional[bool] = None + self._decoder: Optional["IncrementalDecoder"] = None + # Whether the current content is the first chunk of download content or not + self._first_chunk = True + self._download_start = self._start_range or 0 + + # The cls is passed in via download_cls to avoid conflicting arg name with Generic.__new__ + # but needs to be changed to cls in the request options. + self._request_options['cls'] = download_cls + + if self._encryption_options.get("key") is not None or self._encryption_options.get("resolver") is not None: + self._get_encryption_data_request() # The service only provides transactional MD5s for chunks under 4MB. # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first # chunk so a transactional MD5 can be retrieved. 
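# A worked sketch (not part of this patch) of the first-GET sizing rule described
# in the comment above. The constants are assumptions matching the library's usual
# defaults (32MB single get, 4MB chunk), not values read from this diff.
MAX_SINGLE_GET_SIZE = 32 * 1024 * 1024
MAX_CHUNK_GET_SIZE = 4 * 1024 * 1024

def first_get_size(validate_content: bool) -> int:
    # Transactional MD5 is only served for ranges of 4MB or less, so a validated
    # download must keep the first request within a single chunk.
    return MAX_CHUNK_GET_SIZE if validate_content else MAX_SINGLE_GET_SIZE

assert first_get_size(False) == 32 * 1024 * 1024
assert first_get_size(True) == 4 * 1024 * 1024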
- self._first_get_size = ( + first_get_size = ( self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size ) - initial_request_start = self._start_range if self._start_range is not None else 0 - if self._end_range is not None and self._end_range - self._start_range < self._first_get_size: + initial_request_start = self._download_start + if self._end_range is not None and self._end_range - initial_request_start < first_get_size: initial_request_end = self._end_range else: - initial_request_end = initial_request_start + self._first_get_size - 1 + initial_request_end = initial_request_start + first_get_size - 1 self._initial_range, self._initial_offset = process_range_and_offset( - initial_request_start, initial_request_end, self._end_range, self._encryption_options + initial_request_start, + initial_request_end, + self._end_range, + self._encryption_options, + self._encryption_data ) self._response = self._initial_request() - self.properties = self._response.properties + self.properties = cast("BlobProperties", self._response.properties) self.properties.name = self.name self.properties.container = self.container - # Set the content length to the download size instead of the size of - # the last range + # Set the content length to the download size instead of the size of the last range self.properties.size = self.size - - # Overwrite the content range to the user requested range - self.properties.content_range = "bytes {0}-{1}/{2}".format( - self._start_range, - self._end_range, - self._file_size - ) + self.properties.content_range = (f"bytes {self._download_start}-" + f"{self._end_range if self._end_range is not None else self._file_size - 1}/" + f"{self._file_size}") # Overwrite the content MD5 as it is the MD5 for the last range instead # of the stored MD5 # TODO: Set to the stored MD5 when the service returns this - self.properties.content_md5 = None - - if self.size == 0: - self._current_content = b"" - else: - self._current_content = process_content( - self._response, - self._initial_offset[0], - self._initial_offset[1], - self._encryption_options - ) + self.properties.content_md5 = None # type: ignore [attr-defined] def __len__(self): return self.size + def _get_encryption_data_request(self) -> None: + # Save current request cls + download_cls = self._request_options.pop('cls', None) + # Adjust cls for get_properties + self._request_options['cls'] = deserialize_blob_properties + + properties = cast("BlobProperties", self._clients.blob.get_properties(**self._request_options)) + # This will return None if there is no encryption metadata or there are parsing errors. + # That is acceptable here, the proper error will be caught and surfaced when attempting + # to decrypt the blob. 
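# A minimal sketch (not part of this patch) of the save/swap/restore pattern that
# _get_encryption_data_request uses for the 'cls' request option: park the download
# deserializer, swap in a properties deserializer, then restore the original.
# The helper below is illustrative only, not an API from this diff.
def with_temporary_option(options, key, temp_value, action):
    saved = options.pop(key, None)   # park the caller's value
    options[key] = temp_value
    try:
        return action(options)
    finally:
        options[key] = saved         # restore it for the actual download

opts = {"cls": "download-deserializer"}
seen = with_temporary_option(opts, "cls", "properties-deserializer", lambda o: o["cls"])
assert seen == "properties-deserializer" and opts["cls"] == "download-deserializer"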
+ self._encryption_data = parse_encryption_data(properties.metadata) + + # Restore cls for download + self._request_options['cls'] = download_cls + + @property + def _download_complete(self): + if is_encryption_v2(self._encryption_data): + return self._download_offset >= self.size + return self._raw_download_offset >= self.size + def _initial_request(self): range_header, range_validation = validate_and_format_range_headers( self._initial_range[0], @@ -356,51 +449,78 @@ def _initial_request(self): check_content_md5=self._validate_content ) - try: - location_mode, response = self._clients.blob.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self._validate_content, - data_stream_total=None, - download_stream_current=0, - **self._request_options - ) + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = cast(Tuple[Optional[str], Any], self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options + )) - # Check the location we read from to ensure we use the same one - # for subsequent requests. - self._location_mode = location_mode - - # Parse the total file size and adjust the download size if ranges - # were specified - self._file_size = parse_length_from_content_range(response.properties.content_range) - if self._end_range is not None: - # Use the end range index unless it is over the end of the file - self.size = min(self._file_size, self._end_range - self._start_range + 1) - elif self._start_range is not None: - self.size = self._file_size - self._start_range - else: - self.size = self._file_size + # Check the location we read from to ensure we use the same one + # for subsequent requests. + self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._file_size is None: + raise ValueError("Required Content-Range response header is missing or malformed.") + # Remove any extra encryption data size from blob size + self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) + + if self._end_range is not None and self._start_range is not None: + # Use the end range index unless it is over the end of the file + self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size - except HttpResponseError as error: - if self._start_range is None and error.response.status_code == 416: - # Get range will fail on an empty file. If the user did not - # request a range, do a regular get request in order to get - # any properties. - try: - _, response = self._clients.blob.download( - validate_content=self._validate_content, - data_stream_total=0, - download_stream_current=0, - **self._request_options - ) - except HttpResponseError as error: + except HttpResponseError as error: + if self._start_range is None and error.response and error.response.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
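# A worked example (not part of this patch) of the ranged-download size arithmetic
# in _initial_request above; the numbers are made up for illustration.
def download_size(file_size, start_range, end_range):
    if end_range is not None and start_range is not None:
        # Use the end-range index unless it runs past the end of the blob.
        return min(file_size - start_range, end_range - start_range + 1)
    if start_range is not None:
        return file_size - start_range
    return file_size

assert download_size(1000, 100, 299) == 200     # bytes 100-299 inclusive
assert download_size(1000, 900, 1999) == 100    # clamped at the end of the blob
assert download_size(1000, None, None) == 1000  # whole blob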
+ try: + _, response = self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options + ) + except HttpResponseError as e: + process_storage_error(e) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: process_storage_error(error) - # Set the download size to empty - self.size = 0 - self._file_size = 0 - else: - process_storage_error(error) + try: + if self.size == 0: + self._current_content = b"" + else: + self._current_content = process_content( + response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + time.sleep(1) + self._download_offset += len(self._current_content) + self._raw_download_offset += response.content_length # get page ranges to optimize downloading sparse page blob if response.properties.blob_type == 'PageBlob': @@ -414,104 +534,249 @@ def _initial_request(self): except HttpResponseError: pass - # If the file is small, the download is complete at this point. - # If file size is large, download the rest of the file in chunks. - if response.properties.size != self.size: - # Lock on the etag. This can be overriden by the user by specifying '*' - if self._request_options.get("modified_access_conditions"): - if not self._request_options["modified_access_conditions"].if_match: - self._request_options["modified_access_conditions"].if_match = response.properties.etag - else: - self._download_complete = True + if not self._download_complete and self._request_options.get("modified_access_conditions"): + self._request_options["modified_access_conditions"].if_match = response.properties.etag + return response - def chunks(self): - if self.size == 0 or self._download_complete: - iter_downloader = None - else: - data_end = self._file_size - if self._end_range is not None: - # Use the end range index unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + def chunks(self) -> Iterator[bytes]: + """ + Iterate over chunks in the download stream. Note, the iterator returned will + iterate over the entire download content, regardless of any data that was + previously read. + + NOTE: If the stream has been partially read, some data may be re-downloaded by the iterator. + + :returns: An iterator of the chunks in the download stream. + :rtype: Iterator[bytes] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world.py + :start-after: [START download_a_blob_in_chunk] + :end-before: [END download_a_blob_in_chunk] + :language: python + :dedent: 12 + :caption: Download a blob using chunks(). + """ + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. chunks is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with chunks as only bytes are supported.") + + iter_downloader = None + # If we still have the first chunk buffered, use it. 
Otherwise, download all content again + if not self._first_chunk or not self._download_complete: + if self._first_chunk: + start = self._download_start + len(self._current_content) + current_progress = len(self._current_content) + else: + start = self._download_start + current_progress = 0 + + end = self._download_start + self.size + iter_downloader = _ChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # start where the first download ended - end_range=data_end, - stream=None, - parallel=False, + current_progress=current_progress, + start_range=start, + end_range=end, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, **self._request_options ) + + initial_content = self._current_content if self._first_chunk else b'' return _ChunkIterator( size=self.size, - content=self._current_content, - downloader=iter_downloader) + content=cast(bytes, initial_content), + downloader=iter_downloader, + chunk_size=self._config.max_chunk_get_size) - def readall(self): - """Download the contents of this blob. + @overload + def read(self, size: int = -1) -> T: + ... - This operation is blocking until all data is downloaded. + @overload + def read(self, *, chars: Optional[int] = None) -> T: + ... - :rtype: bytes or str + # pylint: disable-next=too-many-statements,too-many-branches + def read(self, size: int = -1, *, chars: Optional[int] = None) -> T: """ - stream = BytesIO() - self.readinto(stream) - data = stream.getvalue() - if self._encoding: - return data.decode(self._encoding) - return data + Read the specified bytes or chars from the stream. If `encoding` + was specified on `download_blob`, it is recommended to use the + chars parameter to read a specific number of chars to avoid decoding + errors. If size/chars is unspecified or negative all bytes will be read. + + :param int size: + The number of bytes to download from the stream. Leave unspecified + or set negative to download all bytes. + :keyword Optional[int] chars: + The number of chars to download from the stream. Leave unspecified + or set negative to download all chars. Note, this can only be used + when encoding is specified on `download_blob`. + :returns: + The requested data as bytes or a string if encoding was specified. If + the return value is empty, there is no more data to read. + :rtype: T + """ + if size > -1 and self._encoding: + warnings.warn( + "Size parameter specified with text encoding enabled. It is recommended to use chars " + "to read a specific number of characters instead." + ) + if size > -1 and chars is not None: + raise ValueError("Cannot specify both size and chars.") + if not self._encoding and chars is not None: + raise ValueError("Must specify encoding to read chars.") + if self._text_mode and size > -1: + raise ValueError("Stream has been partially read in text mode. Please use chars.") + if self._text_mode is False and chars is not None: + raise ValueError("Stream has been partially read in bytes mode. 
Please use size.") + + # Empty blob or already read to the end + if (size == 0 or chars == 0 or + (self._download_complete and self._current_content_offset >= len(self._current_content))): + return b'' if not self._encoding else '' # type: ignore [return-value] + + if not self._text_mode and chars is not None and self._encoding is not None: + self._text_mode = True + self._decoder = codecs.getincrementaldecoder(self._encoding)('strict') + self._current_content = self._decoder.decode( + cast(bytes, self._current_content), final=self._download_complete) + elif self._text_mode is None: + self._text_mode = False + + output_stream: Union[BytesIO, StringIO] + if self._text_mode: + output_stream = StringIO() + size = chars if chars else sys.maxsize + else: + output_stream = BytesIO() + size = size if size > 0 else sys.maxsize + readall = size == sys.maxsize + count = 0 + + # Start by reading from current_content + start = self._current_content_offset + length = min(len(self._current_content) - self._current_content_offset, size - count) + read = output_stream.write(self._current_content[start:start + length]) # type: ignore [arg-type] + + count += read + self._current_content_offset += read + self._read_offset += read + self._check_and_report_progress() + + remaining = size - count + if remaining > 0 and not self._download_complete: + # Create a downloader than can download the rest of the file + start = self._download_start + self._download_offset + end = self._download_start + self.size + + parallel = self._max_concurrency > 1 + downloader = _ChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._read_offset, + start_range=start, + end_range=end, + stream=output_stream, + parallel=parallel, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + encryption_data=self._encryption_data, + use_location=self._location_mode, + progress_hook=self._progress_hook, + **self._request_options + ) + self._first_chunk = False + + # When reading all data, have the downloader read everything into the stream. + # Else, read one chunk at a time (using the downloader as an iterator) until + # the requested size is reached. + chunks_iter = downloader.get_chunk_offsets() + if readall and not self._text_mode: + # Only do parallel if there is more than one chunk left to download + if parallel and (self.size - self._download_offset) > self._config.max_chunk_get_size: + import concurrent.futures + with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor: + list(executor.map( + with_current_context(downloader.process_chunk), + downloader.get_chunk_offsets() + )) + else: + for next_chunk in chunks_iter: + downloader.process_chunk(next_chunk) + + self._complete_read() - def content_as_bytes(self, max_concurrency=1): - """Download the contents of this file. 
+ else: + while (chunk := next(chunks_iter, None)) is not None and remaining > 0: + chunk_data, content_length = downloader.yield_chunk(chunk) + self._download_offset += len(chunk_data) + self._raw_download_offset += content_length + if self._text_mode and self._decoder is not None: + self._current_content = self._decoder.decode(chunk_data, final=self._download_complete) + else: + self._current_content = chunk_data + + if remaining < len(self._current_content): + read = output_stream.write(self._current_content[:remaining]) # type: ignore [arg-type] + else: + read = output_stream.write(self._current_content) # type: ignore [arg-type] + + self._current_content_offset = read + self._read_offset += read + remaining -= read + self._check_and_report_progress() + + data = output_stream.getvalue() + if not self._text_mode and self._encoding: + try: + # This is technically incorrect to do, but we have it for backwards compatibility. + data = cast(bytes, data).decode(self._encoding) + except UnicodeDecodeError: + warnings.warn( + "Encountered a decoding error while decoding blob data from a partial read. " + "Try using the `chars` keyword instead to read in text mode." + ) + raise - This operation is blocking until all data is downloaded. + return data # type: ignore [return-value] - :keyword int max_concurrency: - The number of parallel connections with which to download. - :rtype: bytes + def readall(self) -> T: """ - warnings.warn( - "content_as_bytes is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - return self.readall() - - def content_as_text(self, max_concurrency=1, encoding="UTF-8"): - """Download the contents of this blob, and decode as text. - + Read the entire contents of this blob. This operation is blocking until all data is downloaded. - :keyword int max_concurrency: - The number of parallel connections with which to download. - :param str encoding: - Test encoding to decode the downloaded bytes. Default is UTF-8. - :rtype: str + :returns: The requested data as bytes or a string if encoding was specified. + :rtype: T """ - warnings.warn( - "content_as_text is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - self._encoding = encoding - return self.readall() + return self.read() - def readinto(self, stream): + def readinto(self, stream: IO[bytes]) -> int: """Download the contents of this file to a stream. - :param stream: + :param IO[bytes] stream: The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. :returns: The number of bytes read. :rtype: int """ + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. 
readinto is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with readinto as only byte streams are supported.") + # The stream must be seekable if parallel download is required parallel = self._max_concurrency > 1 if parallel: @@ -521,53 +786,137 @@ def readinto(self, stream): try: stream.seek(stream.tell()) - except (NotImplementedError, AttributeError): - raise ValueError(error_message) - - # Write the content to the user stream - stream.write(self._current_content) + except (NotImplementedError, AttributeError) as exc: + raise ValueError(error_message) from exc + + # If some data has been streamed using `read`, only stream the remaining data + remaining_size = self.size - self._read_offset + # Already read to the end + if remaining_size <= 0: + return 0 + + # Write the current content to the user stream + current_remaining = len(self._current_content) - self._current_content_offset + start = self._current_content_offset + count = stream.write(cast(bytes, self._current_content[start:start + current_remaining])) + + self._current_content_offset += count + self._read_offset += count + if self._progress_hook: + self._progress_hook(self._read_offset, self.size) + + # If all the data was already downloaded/buffered if self._download_complete: - return self.size + return remaining_size - data_end = self._file_size - if self._end_range is not None: - # Use the length unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + data_start = self._download_start + self._read_offset + data_end = self._download_start + self.size downloader = _ChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # Start where the first download ended + current_progress=self._read_offset, + start_range=data_start, end_range=data_end, stream=stream, parallel=parallel, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, + progress_hook=self._progress_hook, **self._request_options ) if parallel: import concurrent.futures - executor = concurrent.futures.ThreadPoolExecutor(self._max_concurrency) - list(executor.map( - with_current_context(downloader.process_chunk), - downloader.get_chunk_offsets() - )) + with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor: + list(executor.map( + with_current_context(downloader.process_chunk), + downloader.get_chunk_offsets() + )) else: for chunk in downloader.get_chunk_offsets(): downloader.process_chunk(chunk) - return self.size + + self._complete_read() + return remaining_size + + def _complete_read(self): + """Adjusts all offsets to the end of the download.""" + self._download_offset = self.size + self._raw_download_offset = self.size + self._read_offset = self.size + self._current_content_offset = len(self._current_content) + + def _check_and_report_progress(self): + """Reports progress if necessary.""" + # Only report progress at the end of each chunk and use download_offset to always report + # progress in terms of (approximate) byte count. + if self._progress_hook and self._current_content_offset == len(self._current_content): + self._progress_hook(self._download_offset, self.size) + + def content_as_bytes(self, max_concurrency=1): + """DEPRECATED: Download the contents of this file. 
+
+ This operation is blocking until all data is downloaded.
+
+ This method is deprecated, use :func:`readall` instead.
+
+ :param int max_concurrency:
+ The number of parallel connections with which to download.
+ :returns: The contents of the file as bytes.
+ :rtype: bytes
+ """
+ warnings.warn(
+ "content_as_bytes is deprecated, use readall instead",
+ DeprecationWarning
+ )
+ if self._text_mode:
+ raise ValueError("Stream has been partially read in text mode. "
+ "content_as_bytes is not supported in text mode.")
+
+ self._max_concurrency = max_concurrency
+ return self.readall()
+
+ def content_as_text(self, max_concurrency=1, encoding="UTF-8"):
+ """DEPRECATED: Download the contents of this blob, and decode as text.
+
+ This operation is blocking until all data is downloaded.
+
+ This method is deprecated, use :func:`readall` instead.
+
+ :param int max_concurrency:
+ The number of parallel connections with which to download.
+ :param str encoding:
+ The text encoding with which to decode the downloaded bytes. Default is UTF-8.
+ :returns: The content of the file as a str.
+ :rtype: str
+ """
+ warnings.warn(
+ "content_as_text is deprecated, use readall instead",
+ DeprecationWarning
+ )
+ if self._text_mode:
+ raise ValueError("Stream has been partially read in text mode. "
+ "content_as_text is not supported in text mode.")
+
+ self._max_concurrency = max_concurrency
+ self._encoding = encoding
+ return self.readall()
 def download_to_stream(self, stream, max_concurrency=1):
- """Download the contents of this blob to a stream.
+ """DEPRECATED: Download the contents of this blob to a stream.
+
+ This method is deprecated, use :func:`readinto` instead.
- :param stream:
+ :param IO[T] stream:
 The stream to download to. This can be an open file-handle,
 or any writable stream. The stream must be seekable if the download
 uses more than one parallel connection.
+ :param int max_concurrency:
+ The number of parallel connections with which to download.
 :returns: The properties of the downloaded blob.
 :rtype: Any
 """
@@ -575,6 +924,10 @@ def download_to_stream(self, stream, max_concurrency=1):
 "download_to_stream is deprecated, use readinto instead",
 DeprecationWarning
 )
+ if self._text_mode:
+ raise ValueError("Stream has been partially read in text mode. "
+ "download_to_stream is not supported in text mode.")
+
 self._max_concurrency = max_concurrency
 self.readinto(stream)
 return self.properties
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_encryption.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_encryption.py
new file mode 100644
index 000000000000..42f5c51d0762
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_encryption.py
@@ -0,0 +1,1127 @@
+# pylint: disable=too-many-lines
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- + +import math +import os +import sys +import warnings +from collections import OrderedDict +from io import BytesIO +from json import ( + dumps, + loads, +) +from typing import Any, Callable, Dict, IO, Optional, Tuple, TYPE_CHECKING +from typing import OrderedDict as TypedOrderedDict +from typing_extensions import Protocol + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.aead import AESGCM +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import CBC +from cryptography.hazmat.primitives.padding import PKCS7 + +from azure.core.exceptions import HttpResponseError +from azure.core.utils import CaseInsensitiveDict + +from ._version import VERSION +from ._shared import decode_base64_to_bytes, encode_base64 + +if TYPE_CHECKING: + from azure.core.pipeline import PipelineResponse + from cryptography.hazmat.primitives.ciphers import AEADEncryptionContext + from cryptography.hazmat.primitives.padding import PaddingContext + + +_ENCRYPTION_PROTOCOL_V1 = '1.0' +_ENCRYPTION_PROTOCOL_V2 = '2.0' +_ENCRYPTION_PROTOCOL_V2_1 = '2.1' +_VALID_ENCRYPTION_PROTOCOLS = [_ENCRYPTION_PROTOCOL_V1, _ENCRYPTION_PROTOCOL_V2, _ENCRYPTION_PROTOCOL_V2_1] +_ENCRYPTION_V2_PROTOCOLS = [_ENCRYPTION_PROTOCOL_V2, _ENCRYPTION_PROTOCOL_V2_1] +_GCM_REGION_DATA_LENGTH = 4 * 1024 * 1024 +_GCM_NONCE_LENGTH = 12 +_GCM_TAG_LENGTH = 16 + +_ERROR_OBJECT_INVALID = \ + '{0} does not define a complete interface. Value of {1} is either missing or invalid.' + +_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( + 'The require_encryption flag is set, but encryption is not supported' + ' for this method.') + + +class KeyEncryptionKey(Protocol): + + def wrap_key(self, key: bytes) -> bytes: + ... + + def unwrap_key(self, key: bytes, algorithm: str) -> bytes: + ... + + def get_kid(self) -> str: + ... + + def get_key_wrap_algorithm(self) -> str: + ... + + +def _validate_not_none(param_name: str, param: Any): + if param is None: + raise ValueError(f'{param_name} should not be None.') + + +def _validate_key_encryption_key_wrap(kek: KeyEncryptionKey): + # Note that None is not callable and so will fail the second clause of each check. + if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key')) + if not hasattr(kek, 'get_kid') or not callable(kek.get_kid): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) + if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm')) + + +class StorageEncryptionMixin(object): + def _configure_encryption(self, kwargs: Dict[str, Any]): + self.require_encryption = kwargs.get("require_encryption", False) + self.encryption_version = kwargs.get("encryption_version", "1.0") + self.key_encryption_key = kwargs.get("key_encryption_key") + self.key_resolver_function = kwargs.get("key_resolver_function") + if self.key_encryption_key and self.encryption_version == '1.0': + warnings.warn("This client has been configured to use encryption with version 1.0. " + + "Version 1.0 is deprecated and no longer considered secure. It is highly " + + "recommended that you switch to using version 2.0. 
The version can be " + "specified using the 'encryption_version' keyword.")
+
+
+class _EncryptionAlgorithm(object):
+ """
+ Specifies which client encryption algorithm is used.
+ """
+ AES_CBC_256 = 'AES_CBC_256'
+ AES_GCM_256 = 'AES_GCM_256'
+
+
+class _WrappedContentKey:
+ """
+ Represents the envelope key details stored on the service.
+ """
+
+ def __init__(self, algorithm: str, encrypted_key: bytes, key_id: str) -> None:
+ """
+ :param str algorithm:
+ The algorithm used for wrapping.
+ :param bytes encrypted_key:
+ The encrypted content-encryption-key.
+ :param str key_id:
+ The key-encryption-key identifier string.
+ """
+ _validate_not_none('algorithm', algorithm)
+ _validate_not_none('encrypted_key', encrypted_key)
+ _validate_not_none('key_id', key_id)
+
+ self.algorithm = algorithm
+ self.encrypted_key = encrypted_key
+ self.key_id = key_id
+
+
+class _EncryptedRegionInfo:
+ """
+ Represents the length of encryption elements.
+ This is only used for Encryption V2.
+ """
+
+ def __init__(self, data_length: int, nonce_length: int, tag_length: int) -> None:
+ """
+ :param int data_length:
+ The length of the encryption region data (not including nonce + tag).
+ :param int nonce_length:
+ The length of the nonce used when encrypting.
+ :param int tag_length:
+ The length of the encryption tag.
+ """
+ _validate_not_none('data_length', data_length)
+ _validate_not_none('nonce_length', nonce_length)
+ _validate_not_none('tag_length', tag_length)
+
+ self.data_length = data_length
+ self.nonce_length = nonce_length
+ self.tag_length = tag_length
+
+
+class _EncryptionAgent:
+ """
+ Represents the encryption agent stored on the service.
+ It consists of the encryption protocol version and encryption algorithm used.
+ """
+
+ def __init__(self, encryption_algorithm: _EncryptionAlgorithm, protocol: str) -> None:
+ """
+ :param _EncryptionAlgorithm encryption_algorithm:
+ The algorithm used for encrypting the message contents.
+ :param str protocol:
+ The protocol version used for encryption.
+ """
+ _validate_not_none('encryption_algorithm', encryption_algorithm)
+ _validate_not_none('protocol', protocol)
+
+ self.encryption_algorithm = str(encryption_algorithm)
+ self.protocol = protocol
+
+
+class _EncryptionData:
+ """
+ Represents the encryption data that is stored on the service.
+ """
+
+ def __init__(
+ self, content_encryption_IV: Optional[bytes],
+ encrypted_region_info: Optional[_EncryptedRegionInfo],
+ encryption_agent: _EncryptionAgent,
+ wrapped_content_key: _WrappedContentKey,
+ key_wrapping_metadata: Dict[str, Any]
+ ) -> None:
+ """
+ :param Optional[bytes] content_encryption_IV:
+ The content encryption initialization vector.
+ Required for AES-CBC (V1).
+ :param Optional[_EncryptedRegionInfo] encrypted_region_info:
+ The info about the authenticated block sizes.
+ Required for AES-GCM (V2).
+ :param _EncryptionAgent encryption_agent:
+ The encryption agent.
+ :param _WrappedContentKey wrapped_content_key:
+ An object that stores the wrapping algorithm, the key identifier,
+ and the encrypted key bytes.
+ :param Dict[str, Any] key_wrapping_metadata:
+ A dict containing metadata related to the key wrapping.
+ """ + _validate_not_none('encryption_agent', encryption_agent) + _validate_not_none('wrapped_content_key', wrapped_content_key) + + # Validate we have the right matching optional parameter for the specified algorithm + if encryption_agent.encryption_algorithm == _EncryptionAlgorithm.AES_CBC_256: + _validate_not_none('content_encryption_IV', content_encryption_IV) + elif encryption_agent.encryption_algorithm == _EncryptionAlgorithm.AES_GCM_256: + _validate_not_none('encrypted_region_info', encrypted_region_info) + else: + raise ValueError("Invalid encryption algorithm.") + + self.content_encryption_IV = content_encryption_IV + self.encrypted_region_info = encrypted_region_info + self.encryption_agent = encryption_agent + self.wrapped_content_key = wrapped_content_key + self.key_wrapping_metadata = key_wrapping_metadata + + +class GCMBlobEncryptionStream: + """ + A stream that performs AES-GCM encryption on the given data as + it's streamed. Data is read and encrypted in regions. The stream + will use the same encryption key and will generate a guaranteed unique + nonce for each encryption region. + """ + def __init__( + self, content_encryption_key: bytes, + data_stream: IO[bytes], + ) -> None: + """ + :param bytes content_encryption_key: The encryption key to use. + :param IO[bytes] data_stream: The data stream to read data from. + """ + self.content_encryption_key = content_encryption_key + self.data_stream = data_stream + + self.offset = 0 + self.current = b'' + self.nonce_counter = 0 + + def read(self, size: int = -1) -> bytes: + """ + Read data from the stream. Specify -1 to read all available data. + + :param int size: The amount of data to read. Defaults to -1 for all data. + :return: The bytes read. + :rtype: bytes + """ + result = BytesIO() + remaining = sys.maxsize if size == -1 else size + + while remaining > 0: + # Start by reading from current + if len(self.current) > 0: + read = min(remaining, len(self.current)) + result.write(self.current[:read]) + + self.current = self.current[read:] + self.offset += read + remaining -= read + + if remaining > 0: + # Read one region of data and encrypt it + data = self.data_stream.read(_GCM_REGION_DATA_LENGTH) + if len(data) == 0: + # No more data to read + break + + self.current = encrypt_data_v2(data, self.nonce_counter, self.content_encryption_key) + # IMPORTANT: Must increment the nonce each time. + self.nonce_counter += 1 + + return result.getvalue() + + +def encrypt_data_v2(data: bytes, nonce: int, key: bytes) -> bytes: + """ + Encrypts the given data using the given nonce and key using AES-GCM. + The result includes the data in the form: nonce + ciphertext + tag. + + :param bytes data: The raw data to encrypt. + :param int nonce: The nonce to use for encryption. + :param bytes key: The encryption key to use for encryption. + :return: The encrypted bytes in the form: nonce + ciphertext + tag. + :rtype: bytes + """ + nonce_bytes = nonce.to_bytes(_GCM_NONCE_LENGTH, 'big') + aesgcm = AESGCM(key) + + # Returns ciphertext + tag + ciphertext_with_tag = aesgcm.encrypt(nonce_bytes, data, None) + return nonce_bytes + ciphertext_with_tag + + +def is_encryption_v2(encryption_data: Optional[_EncryptionData]) -> bool: + """ + Determine whether the given encryption data signifies version 2.0 or 2.1. + + :param Optional[_EncryptionData] encryption_data: The encryption data. Will return False if this is None. + :return: True, if the encryption data indicates encryption V2, false otherwise. 
+ :rtype: bool
+ """
+ # If encryption_data is None, assume no encryption
+ return bool(encryption_data and (encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS))
+
+
+def modify_user_agent_for_encryption(
+ user_agent: str,
+ moniker: str,
+ encryption_version: str,
+ request_options: Dict[str, Any]
+ ) -> None:
+ """
+ Modifies the request options to contain a user agent string updated with encryption information.
+ Adds azstorage-clientsideencryption/ immediately preceding the SDK descriptor.
+
+ :param str user_agent: The existing User Agent to modify.
+ :param str moniker: The specific SDK moniker. The modification will immediately precede azsdk-python-{moniker}.
+ :param str encryption_version: The version of encryption being used.
+ :param Dict[str, Any] request_options: The request options to add the user agent override to.
+ """
+ # If the user has specified user_agent_overwrite=True, don't make any modifications
+ if request_options.get('user_agent_overwrite'):
+ return
+
+ # If the feature flag is already present, don't add it again
+ feature_flag = f"azstorage-clientsideencryption/{encryption_version}"
+ if feature_flag in user_agent:
+ return
+
+ index = user_agent.find(f"azsdk-python-{moniker}")
+ user_agent = f"{user_agent[:index]}{feature_flag} {user_agent[index:]}"
+ # Since we are using user_agent_overwrite=True, we must prepend the user's user_agent if there is one
+ if request_options.get('user_agent'):
+ user_agent = f"{request_options.get('user_agent')} {user_agent}"
+
+ request_options['user_agent'] = user_agent
+ request_options['user_agent_overwrite'] = True
+
+
+def get_adjusted_upload_size(length: int, encryption_version: str) -> int:
+ """
+ Get the adjusted size of the blob upload which accounts for
+ extra encryption data (padding OR nonce + tag).
+
+ :param int length: The plaintext data length.
+ :param str encryption_version: The version of encryption being used.
+ :return: The new upload size to use.
+ :rtype: int
+ """
+ if encryption_version == _ENCRYPTION_PROTOCOL_V1:
+ return length + (16 - (length % 16))
+
+ if encryption_version == _ENCRYPTION_PROTOCOL_V2:
+ encryption_data_length = _GCM_NONCE_LENGTH + _GCM_TAG_LENGTH
+ regions = math.ceil(length / _GCM_REGION_DATA_LENGTH)
+ return length + (regions * encryption_data_length)
+
+ raise ValueError("Invalid encryption version specified.")
+
+
+def get_adjusted_download_range_and_offset(
+ start: int,
+ end: int,
+ length: Optional[int],
+ encryption_data: Optional[_EncryptionData]) -> Tuple[Tuple[int, int], Tuple[int, int]]:
+ """
+ Gets the new download range and offsets into the decrypted data for
+ the given user-specified range. The new download range will include all
+ the data needed to decrypt the user-provided range and will include only
+ full encryption regions.
+
+ The offsets returned will be the offsets needed to fetch the user-requested
+ data out of the full decrypted data. The end offset is different based on the
+ encryption version. For V1, the end offset is an offset from the end, whereas for
+ V2, the end offset is the ending index into the stream.
+ V1: decrypted_data[start_offset : len(decrypted_data) - end_offset]
+ V2: decrypted_data[start_offset : end_offset]
+
+ :param int start: The user-requested start index.
+ :param int end: The user-requested end index.
+ :param Optional[int] length: The user-requested length. Only used for V1.
+ :param Optional[_EncryptionData] encryption_data: The encryption data to determine version and sizes.
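# A worked example (not part of this patch) of the get_adjusted_upload_size
# arithmetic above, using the module's 4MiB region size and 12+16 bytes of
# per-region GCM overhead.
import math

REGION = 4 * 1024 * 1024   # _GCM_REGION_DATA_LENGTH
OVERHEAD = 12 + 16         # _GCM_NONCE_LENGTH + _GCM_TAG_LENGTH

def v1_size(length):       # PKCS7 always pads, even when already block-aligned
    return length + (16 - (length % 16))

def v2_size(length):       # one nonce+tag per (possibly partial) region
    return length + math.ceil(length / REGION) * OVERHEAD

assert v1_size(100) == 112                               # padded to a 16-byte boundary
assert v1_size(112) == 128                               # a whole extra block when aligned
assert v2_size(REGION + 1) == REGION + 1 + 2 * OVERHEAD  # spills into a second region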
+ :return: (new start, new end), (start offset, end offset)
+ :rtype: Tuple[Tuple[int, int], Tuple[int, int]]
+ """
+ start_offset, end_offset = 0, 0
+ if encryption_data is None:
+ return (start, end), (start_offset, end_offset)
+
+ if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1:
+ if start is not None:
+ # Align the start of the range along a 16 byte block
+ start_offset = start % 16
+ start -= start_offset
+
+ # Include an extra 16 bytes for the IV if necessary
+ # Because of the previous offsetting, start_range will always
+ # be a multiple of 16.
+ if start > 0:
+ start_offset += 16
+ start -= 16
+
+ if length is not None:
+ # Align the end of the range along a 16 byte block
+ end_offset = 15 - (end % 16)
+ end += end_offset
+
+ elif encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS:
+ start_offset, end_offset = 0, end
+
+ if encryption_data.encrypted_region_info is None:
+ raise ValueError("Missing required metadata for Encryption V2")
+
+ nonce_length = encryption_data.encrypted_region_info.nonce_length
+ data_length = encryption_data.encrypted_region_info.data_length
+ tag_length = encryption_data.encrypted_region_info.tag_length
+ region_length = nonce_length + data_length + tag_length
+ requested_length = end - start
+
+ if start is not None:
+ # Find which data region the start is in
+ region_num = start // data_length
+ # The start of the data region is different from the start of the encryption region
+ data_start = region_num * data_length
+ region_start = region_num * region_length
+ # Offset is based on data region
+ start_offset = start - data_start
+ # New start is the start of the encryption region
+ start = region_start
+
+ if end is not None:
+ # Find which data region the end is in
+ region_num = end // data_length
+ end_offset = start_offset + requested_length + 1
+ # New end is the end of the encryption region
+ end = (region_num * region_length) + region_length - 1
+
+ return (start, end), (start_offset, end_offset)
+
+
+def parse_encryption_data(metadata: Dict[str, Any]) -> Optional[_EncryptionData]:
+ """
+ Parses the encryption data out of the given blob metadata. If metadata does
+ not exist or there are parsing errors, this function will just return None.
+
+ :param Dict[str, Any] metadata: The blob metadata parsed from the response.
+ :return: The encryption data or None.
+ :rtype: Optional[_EncryptionData]
+ """
+ try:
+ # Use a case-insensitive dict, as the key lookup needs to be case-insensitive
+ case_insensitive_metadata = CaseInsensitiveDict(metadata)
+ return _dict_to_encryption_data(loads(case_insensitive_metadata['encryptiondata']))
+ except: # pylint: disable=bare-except
+ return None
+
+
+def adjust_blob_size_for_encryption(size: int, encryption_data: Optional[_EncryptionData]) -> int:
+ """
+ Adjusts the given blob size for encryption by subtracting the size of
+ the encryption data (nonce + tag). This only has an effect for encryption V2.
+
+ :param int size: The original blob size.
+ :param Optional[_EncryptionData] encryption_data: The encryption data to determine version and sizes.
+ :return: The new blob size.
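# A worked example (not part of this patch) of the V2 size adjustment described
# above: the stored blob is larger than the plaintext by (nonce + tag) bytes for
# every encryption region.
import math

NONCE, TAG, DATA = 12, 16, 4 * 1024 * 1024
REGION = NONCE + DATA + TAG

def plaintext_size(stored_size):
    regions = math.ceil(stored_size / REGION)
    return stored_size - regions * (NONCE + TAG)

assert plaintext_size(2 * REGION) == 2 * DATA  # two full regions of plaintext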
+ :rtype: int + """ + if (encryption_data is not None and + encryption_data.encrypted_region_info is not None and + is_encryption_v2(encryption_data)): + + nonce_length = encryption_data.encrypted_region_info.nonce_length + data_length = encryption_data.encrypted_region_info.data_length + tag_length = encryption_data.encrypted_region_info.tag_length + region_length = nonce_length + data_length + tag_length + + num_regions = math.ceil(size / region_length) + metadata_size = num_regions * (nonce_length + tag_length) + return size - metadata_size + + return size + + +def _generate_encryption_data_dict( + kek: KeyEncryptionKey, + cek: bytes, + iv: Optional[bytes], + version: str + ) -> TypedOrderedDict[str, Any]: + """ + Generates and returns the encryption metadata as a dict. + + :param KeyEncryptionKey kek: The key encryption key. See calling functions for more information. + :param bytes cek: The content encryption key. + :param Optional[bytes] iv: The initialization vector. Only required for AES-CBC. + :param str version: The client encryption version used. + :return: A dict containing all the encryption metadata. + :rtype: Dict[str, Any] + """ + # Encrypt the cek. + if version == _ENCRYPTION_PROTOCOL_V1: + wrapped_cek = kek.wrap_key(cek) + # For V2, we include the encryption version in the wrapped key. + elif version == _ENCRYPTION_PROTOCOL_V2: + # We must pad the version to 8 bytes for AES Keywrap algorithms + to_wrap = _ENCRYPTION_PROTOCOL_V2.encode().ljust(8, b'\0') + cek + wrapped_cek = kek.wrap_key(to_wrap) + else: + raise ValueError("Invalid encryption version specified.") + + # Build the encryption_data dict. + # Use OrderedDict to comply with Java's ordering requirement. + wrapped_content_key = OrderedDict() + wrapped_content_key['KeyId'] = kek.get_kid() + wrapped_content_key['EncryptedKey'] = encode_base64(wrapped_cek) + wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm() + + encryption_agent = OrderedDict() + encryption_agent['Protocol'] = version + + if version == _ENCRYPTION_PROTOCOL_V1: + encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256 + + elif version == _ENCRYPTION_PROTOCOL_V2: + encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_GCM_256 + + encrypted_region_info = OrderedDict() + encrypted_region_info['DataLength'] = _GCM_REGION_DATA_LENGTH + encrypted_region_info['NonceLength'] = _GCM_NONCE_LENGTH + + encryption_data_dict: TypedOrderedDict[str, Any] = OrderedDict() + encryption_data_dict['WrappedContentKey'] = wrapped_content_key + encryption_data_dict['EncryptionAgent'] = encryption_agent + if version == _ENCRYPTION_PROTOCOL_V1: + encryption_data_dict['ContentEncryptionIV'] = encode_base64(iv) + elif version == _ENCRYPTION_PROTOCOL_V2: + encryption_data_dict['EncryptedRegionInfo'] = encrypted_region_info + encryption_data_dict['KeyWrappingMetadata'] = OrderedDict({'EncryptionLibrary': 'Python ' + VERSION}) + + return encryption_data_dict + + +def _dict_to_encryption_data(encryption_data_dict: Dict[str, Any]) -> _EncryptionData: + """ + Converts the specified dictionary to an EncryptionData object for + eventual use in decryption. + + :param dict encryption_data_dict: + The dictionary containing the encryption data. + :return: an _EncryptionData object built from the dictionary. 
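# An approximate sketch (not part of this patch) of the V2 'encryptiondata'
# metadata document that _generate_encryption_data_dict produces; the field
# values below are placeholders, not real key material or exact library output.
from json import dumps

encryption_data = {
    "WrappedContentKey": {
        "KeyId": "<kek.get_kid()>",
        "EncryptedKey": "<base64 of the wrapped, version-prefixed CEK>",
        "Algorithm": "<kek.get_key_wrap_algorithm()>",
    },
    "EncryptionAgent": {"Protocol": "2.0", "EncryptionAlgorithm": "AES_GCM_256"},
    "EncryptedRegionInfo": {"DataLength": 4 * 1024 * 1024, "NonceLength": 12},
    "KeyWrappingMetadata": {"EncryptionLibrary": "Python <version>"},
    "EncryptionMode": "FullBlob",
}
print(dumps(encryption_data))  # stored on the blob under the 'encryptiondata' key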
+ :rtype: _EncryptionData
+ """
+ try:
+ protocol = encryption_data_dict['EncryptionAgent']['Protocol']
+ if protocol not in _VALID_ENCRYPTION_PROTOCOLS:
+ raise ValueError("Unsupported encryption version.")
+ except KeyError as exc:
+ raise ValueError("Unsupported encryption version.") from exc
+ wrapped_content_key = encryption_data_dict['WrappedContentKey']
+ wrapped_content_key = _WrappedContentKey(wrapped_content_key['Algorithm'],
+ decode_base64_to_bytes(wrapped_content_key['EncryptedKey']),
+ wrapped_content_key['KeyId'])
+
+ encryption_agent = encryption_data_dict['EncryptionAgent']
+ encryption_agent = _EncryptionAgent(encryption_agent['EncryptionAlgorithm'],
+ encryption_agent['Protocol'])
+
+ if 'KeyWrappingMetadata' in encryption_data_dict:
+ key_wrapping_metadata = encryption_data_dict['KeyWrappingMetadata']
+ else:
+ key_wrapping_metadata = None
+
+ # AES-CBC only
+ encryption_iv = None
+ if 'ContentEncryptionIV' in encryption_data_dict:
+ encryption_iv = decode_base64_to_bytes(encryption_data_dict['ContentEncryptionIV'])
+
+ # AES-GCM only
+ region_info = None
+ if 'EncryptedRegionInfo' in encryption_data_dict:
+ encrypted_region_info = encryption_data_dict['EncryptedRegionInfo']
+ region_info = _EncryptedRegionInfo(encrypted_region_info['DataLength'],
+ encrypted_region_info['NonceLength'],
+ _GCM_TAG_LENGTH)
+
+ encryption_data = _EncryptionData(encryption_iv,
+ region_info,
+ encryption_agent,
+ wrapped_content_key,
+ key_wrapping_metadata)
+
+ return encryption_data
+
+
+def _generate_AES_CBC_cipher(cek: bytes, iv: bytes) -> Cipher:
+ """
+ Generates and returns an encryption cipher for AES CBC using the given cek and iv.
+
+ :param bytes cek: The content encryption key for the cipher.
+ :param bytes iv: The initialization vector for the cipher.
+ :return: A cipher for encrypting in AES256 CBC.
+ :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher
+ """
+
+ backend = default_backend()
+ algorithm = AES(cek)
+ mode = CBC(iv)
+ return Cipher(algorithm, mode, backend)
+
+
+def _validate_and_unwrap_cek(
+ encryption_data: _EncryptionData,
+ key_encryption_key: Optional[KeyEncryptionKey] = None,
+ key_resolver: Optional[Callable[[str], KeyEncryptionKey]] = None
+) -> bytes:
+ """
+ Extracts and returns the content_encryption_key stored in the encryption_data object
+ and performs necessary validation on all parameters.
+ :param _EncryptionData encryption_data:
+ The encryption metadata of the retrieved value.
+ :param Optional[KeyEncryptionKey] key_encryption_key:
+ The user-provided key-encryption-key. Must implement the following methods:
+ wrap_key(key)
+ - Wraps the specified key using an algorithm of the user's choice.
+ get_key_wrap_algorithm()
+ - Returns the algorithm used to wrap the specified symmetric key.
+ get_kid()
+ - Returns a string key id for this key-encryption-key.
+ :param Optional[Callable[[str], KeyEncryptionKey]] key_resolver:
+ A function that, given a key_id, returns a key_encryption_key. Please refer
+ to high-level service object instance variables for more details.
+ :return: The content_encryption_key stored in the encryption_data object.
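# A minimal sketch (not part of this patch) of the V2 wrapped-key layout that
# _validate_and_unwrap_cek checks below: the protocol version, NUL-padded to
# 8 bytes, is prepended to the CEK before wrapping, then validated and stripped
# after unwrapping. The key bytes here are placeholders.
version_prefix = b"2.0".ljust(8, b"\x00")
cek = b"\x11" * 32                       # placeholder 256-bit content key
unwrapped = version_prefix + cek         # what kek.unwrap_key(...) hands back

assert unwrapped[:8] == version_prefix   # tamper check on the version prefix
content_encryption_key = unwrapped[8:]
assert content_encryption_key == cek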
+ :rtype: bytes + """ + + _validate_not_none('encrypted_key', encryption_data.wrapped_content_key.encrypted_key) + + # Validate we have the right info for the specified version + if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1: + _validate_not_none('content_encryption_IV', encryption_data.content_encryption_IV) + elif encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS: + _validate_not_none('encrypted_region_info', encryption_data.encrypted_region_info) + else: + raise ValueError('Specified encryption version is not supported.') + + content_encryption_key: Optional[bytes] = None + + # If the resolver exists, give priority to the key it finds. + if key_resolver is not None: + key_encryption_key = key_resolver(encryption_data.wrapped_content_key.key_id) + + if key_encryption_key is None: + raise ValueError("Unable to decrypt. key_resolver and key_encryption_key cannot both be None.") + if not hasattr(key_encryption_key, 'get_kid') or not callable(key_encryption_key.get_kid): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) + if not hasattr(key_encryption_key, 'unwrap_key') or not callable(key_encryption_key.unwrap_key): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key')) + if encryption_data.wrapped_content_key.key_id != key_encryption_key.get_kid(): + raise ValueError('Provided or resolved key-encryption-key does not match the id of key used to encrypt.') + # Will throw an exception if the specified algorithm is not supported. + content_encryption_key = key_encryption_key.unwrap_key( + encryption_data.wrapped_content_key.encrypted_key, + encryption_data.wrapped_content_key.algorithm) + + # For V2, the version is included with the cek. We need to validate it + # and remove it from the actual cek. + if encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS: + version_2_bytes = encryption_data.encryption_agent.protocol.encode().ljust(8, b'\0') + cek_version_bytes = content_encryption_key[:len(version_2_bytes)] + if cek_version_bytes != version_2_bytes: + raise ValueError('The encryption metadata is not valid and may have been modified.') + + # Remove version from the start of the cek. + content_encryption_key = content_encryption_key[len(version_2_bytes):] + + _validate_not_none('content_encryption_key', content_encryption_key) + + return content_encryption_key + + +def _decrypt_message( + message: bytes, + encryption_data: _EncryptionData, + key_encryption_key: Optional[KeyEncryptionKey] = None, + resolver: Optional[Callable[[str], KeyEncryptionKey]] = None +) -> bytes: + """ + Decrypts the given ciphertext using AES256 in CBC mode with 128 bit padding. + Unwraps the content-encryption-key using the user-provided or resolved key-encryption-key (kek). + Returns the original plaintext. + + :param bytes message: + The ciphertext to be decrypted. + :param _EncryptionData encryption_data: + The metadata associated with this ciphertext. + :param Optional[KeyEncryptionKey] key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key) + - Wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm() + - Returns the algorithm used to wrap the specified symmetric key. + get_kid() + - Returns a string key id for this key-encryption-key. + :param Optional[Callable[[str], KeyEncryptionKey]] resolver: + The user-provided key resolver. 
Uses the kid string to return a key-encryption-key + implementing the interface defined above. + :return: The decrypted plaintext. + :rtype: bytes + """ + _validate_not_none('message', message) + content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, resolver) + + if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1: + if not encryption_data.content_encryption_IV: + raise ValueError("Missing required metadata for decryption.") + + cipher = _generate_AES_CBC_cipher(content_encryption_key, encryption_data.content_encryption_IV) + + # decrypt data + decryptor = cipher.decryptor() + decrypted_data = (decryptor.update(message) + decryptor.finalize()) + + # unpad data + unpadder = PKCS7(128).unpadder() + decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize()) + + elif encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS: + block_info = encryption_data.encrypted_region_info + if not block_info or not block_info.nonce_length: + raise ValueError("Missing required metadata for decryption.") + + if encryption_data.encrypted_region_info is None: + raise ValueError("Missing required metadata for Encryption V2") + + nonce_length = int(encryption_data.encrypted_region_info.nonce_length) + + # First bytes are the nonce + nonce = message[:nonce_length] + ciphertext_with_tag = message[nonce_length:] + + aesgcm = AESGCM(content_encryption_key) + decrypted_data = aesgcm.decrypt(nonce, ciphertext_with_tag, None) + + else: + raise ValueError('Specified encryption version is not supported.') + + return decrypted_data + + +def encrypt_blob(blob: bytes, key_encryption_key: KeyEncryptionKey, version: str) -> Tuple[str, bytes]: + """ + Encrypts the given blob using the given encryption protocol version. + Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek). + Returns a json-formatted string containing the encryption metadata. This method should + only be used when a blob is small enough for single shot upload. Encrypting larger blobs + is done as a part of the upload_data_chunks method. + + :param bytes blob: + The blob to be encrypted. + :param KeyEncryptionKey key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key) + - Wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm() + - Returns the algorithm used to wrap the specified symmetric key. + get_kid() + - Returns a string key id for this key-encryption-key. + :param str version: The client encryption version to use. + :return: A tuple of json-formatted string containing the encryption metadata and the encrypted blob data. + :rtype: (str, bytes) + """ + + _validate_not_none('blob', blob) + _validate_not_none('key_encryption_key', key_encryption_key) + _validate_key_encryption_key_wrap(key_encryption_key) + + if version == _ENCRYPTION_PROTOCOL_V1: + # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks + content_encryption_key = os.urandom(32) + initialization_vector = os.urandom(16) + + cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector) + + # PKCS7 with 16 byte blocks ensures compatibility with AES. + padder = PKCS7(128).padder() + padded_data = padder.update(blob) + padder.finalize() + + # Encrypt the data. 
+ encryptor = cipher.encryptor() + encrypted_data = encryptor.update(padded_data) + encryptor.finalize() + + elif version == _ENCRYPTION_PROTOCOL_V2: + # AES256 GCM uses 256 bit (32 byte) keys and a 12 byte nonce. + content_encryption_key = os.urandom(32) + initialization_vector = None + + data = BytesIO(blob) + encryption_stream = GCMBlobEncryptionStream(content_encryption_key, data) + + encrypted_data = encryption_stream.read() + + else: + raise ValueError("Invalid encryption version specified.") + + encryption_data = _generate_encryption_data_dict(key_encryption_key, content_encryption_key, + initialization_vector, version) + encryption_data['EncryptionMode'] = 'FullBlob' + + return dumps(encryption_data), encrypted_data + + +def generate_blob_encryption_data( + key_encryption_key: Optional[KeyEncryptionKey], + version: str +) -> Tuple[Optional[bytes], Optional[bytes], Optional[str]]: + """ + Generates the encryption_metadata for the blob. + + :param Optional[KeyEncryptionKey] key_encryption_key: + The key-encryption-key used to wrap the cek associated with this blob. + :param str version: The client encryption version to use. + :return: A tuple containing the cek and iv for this blob as well as the + serialized encryption metadata for the blob. + :rtype: (Optional[bytes], Optional[bytes], Optional[str]) + """ + + encryption_data = None + content_encryption_key = None + initialization_vector = None + if key_encryption_key: + _validate_key_encryption_key_wrap(key_encryption_key) + content_encryption_key = os.urandom(32) + # Initialization vector only needed for V1 + if version == _ENCRYPTION_PROTOCOL_V1: + initialization_vector = os.urandom(16) + encryption_data_dict = _generate_encryption_data_dict(key_encryption_key, + content_encryption_key, + initialization_vector, + version) + encryption_data_dict['EncryptionMode'] = 'FullBlob' + encryption_data = dumps(encryption_data_dict) + + return content_encryption_key, initialization_vector, encryption_data + + +def decrypt_blob( # pylint: disable=too-many-locals,too-many-statements + require_encryption: bool, + key_encryption_key: Optional[KeyEncryptionKey], + key_resolver: Optional[Callable[[str], KeyEncryptionKey]], + content: bytes, + start_offset: int, + end_offset: int, + response_headers: Dict[str, Any] +) -> bytes: + """ + Decrypts the given blob contents and returns only the requested range. + + :param bool require_encryption: + Whether the calling blob service requires objects to be decrypted. + :param Optional[KeyEncryptionKey] key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key) + - Wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm() + - Returns the algorithm used to wrap the specified symmetric key. + get_kid() + - Returns a string key id for this key-encryption-key. + :param key_resolver: + The user-provided key resolver. Uses the kid string to return a key-encryption-key + implementing the interface defined above. + :type key_resolver: Optional[Callable[[str], KeyEncryptionKey]] + :param bytes content: + The encrypted blob content. + :param int start_offset: + The adjusted offset from the beginning of the *decrypted* content for the caller's data. + :param int end_offset: + The adjusted offset from the end of the *decrypted* content for the caller's data. + :param Dict[str, Any] response_headers: + A dictionary of response headers from the download request.
Expected to include the + 'x-ms-meta-encryptiondata' header if the blob was encrypted. + :return: The decrypted blob content. + :rtype: bytes + """ + try: + encryption_data = _dict_to_encryption_data(loads(response_headers['x-ms-meta-encryptiondata'])) + except Exception as exc: # pylint: disable=broad-except + if require_encryption: + raise ValueError( + 'Encryption required, but received data does not contain appropriate metadata. ' + \ + 'Data was either not encrypted or metadata has been lost.') from exc + + return content + + algorithm = encryption_data.encryption_agent.encryption_algorithm + if algorithm not in (_EncryptionAlgorithm.AES_CBC_256, _EncryptionAlgorithm.AES_GCM_256): + raise ValueError('Specified encryption algorithm is not supported.') + + version = encryption_data.encryption_agent.protocol + if version not in _VALID_ENCRYPTION_PROTOCOLS: + raise ValueError('Specified encryption version is not supported.') + + content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, key_resolver) + + if version == _ENCRYPTION_PROTOCOL_V1: + blob_type = response_headers['x-ms-blob-type'] + + iv: Optional[bytes] = None + unpad = False + if 'content-range' in response_headers: + content_range = response_headers['content-range'] + # Format: 'bytes x-y/size' + + # Ignore the word 'bytes' + content_range = content_range.split(' ') + + content_range = content_range[1].split('-') + content_range = content_range[1].split('/') + end_range = int(content_range[0]) + blob_size = int(content_range[1]) + + if start_offset >= 16: + iv = content[:16] + content = content[16:] + start_offset -= 16 + else: + iv = encryption_data.content_encryption_IV + + if end_range == blob_size - 1: + unpad = True + else: + unpad = True + iv = encryption_data.content_encryption_IV + + if blob_type == 'PageBlob': + unpad = False + + if iv is None: + raise ValueError("Missing required metadata for Encryption V1") + + cipher = _generate_AES_CBC_cipher(content_encryption_key, iv) + decryptor = cipher.decryptor() + + content = decryptor.update(content) + decryptor.finalize() + if unpad: + unpadder = PKCS7(128).unpadder() + content = unpadder.update(content) + unpadder.finalize() + + return content[start_offset: len(content) - end_offset] + + if version in _ENCRYPTION_V2_PROTOCOLS: + # We assume the content contains only full encryption regions + total_size = len(content) + offset = 0 + + if encryption_data.encrypted_region_info is None: + raise ValueError("Missing required metadata for Encryption V2") + + nonce_length = encryption_data.encrypted_region_info.nonce_length + data_length = encryption_data.encrypted_region_info.data_length + tag_length = encryption_data.encrypted_region_info.tag_length + region_length = nonce_length + data_length + tag_length + + decrypted_content = bytearray() + while offset < total_size: + # Process one encryption region at a time + process_size = min(region_length, total_size) + encrypted_region = content[offset:offset + process_size] + + # First bytes are the nonce + nonce = encrypted_region[:nonce_length] + ciphertext_with_tag = encrypted_region[nonce_length:] + + aesgcm = AESGCM(content_encryption_key) + decrypted_data = aesgcm.decrypt(nonce, ciphertext_with_tag, None) + decrypted_content.extend(decrypted_data) + + offset += process_size + + # Read the caller requested data from the decrypted content + return decrypted_content[start_offset:end_offset] + + raise ValueError('Specified encryption version is not supported.') + + +def get_blob_encryptor_and_padder( +
cek: Optional[bytes], + iv: Optional[bytes], + should_pad: bool +) -> Tuple[Optional["AEADEncryptionContext"], Optional["PaddingContext"]]: + encryptor = None + padder = None + + if cek is not None and iv is not None: + cipher = _generate_AES_CBC_cipher(cek, iv) + encryptor = cipher.encryptor() + padder = PKCS7(128).padder() if should_pad else None + + return encryptor, padder + + +def encrypt_queue_message(message: str, key_encryption_key: KeyEncryptionKey, version: str) -> str: + """ + Encrypts the given plain text message using the given protocol version. + Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek). + Returns a json-formatted string containing the encrypted message and the encryption metadata. + + :param str message: + The plain text message to be encrypted. + :param KeyEncryptionKey key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key) + - Wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm() + - Returns the algorithm used to wrap the specified symmetric key. + get_kid() + - Returns a string key id for this key-encryption-key. + :param str version: The client encryption version to use. + :return: A json-formatted string containing the encrypted message and the encryption metadata. + :rtype: str + """ + + _validate_not_none('message', message) + _validate_not_none('key_encryption_key', key_encryption_key) + _validate_key_encryption_key_wrap(key_encryption_key) + + # Queue encoding functions all return unicode strings, and encryption should + # operate on binary strings. + message_as_bytes: bytes = message.encode('utf-8') + + if version == _ENCRYPTION_PROTOCOL_V1: + # AES256 CBC uses 256 bit (32 byte) keys and always with 16 byte blocks + content_encryption_key = os.urandom(32) + initialization_vector = os.urandom(16) + + cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector) + + # PKCS7 with 16 byte blocks ensures compatibility with AES. + padder = PKCS7(128).padder() + padded_data = padder.update(message_as_bytes) + padder.finalize() + + # Encrypt the data. + encryptor = cipher.encryptor() + encrypted_data = encryptor.update(padded_data) + encryptor.finalize() + + elif version == _ENCRYPTION_PROTOCOL_V2: + # AES256 GCM uses 256 bit (32 byte) keys and a 12 byte nonce. + content_encryption_key = os.urandom(32) + initialization_vector = None + + # The nonce MUST be unique for each message encrypted with a given key + nonce = os.urandom(12) + aesgcm = AESGCM(content_encryption_key) + + # Returns ciphertext + tag + ciphertext_with_tag = aesgcm.encrypt(nonce, message_as_bytes, None) + encrypted_data = nonce + ciphertext_with_tag + + else: + raise ValueError("Invalid encryption version specified.") + + # Build the dictionary structure. + queue_message = {'EncryptedMessageContents': encode_base64(encrypted_data), + 'EncryptionData': _generate_encryption_data_dict(key_encryption_key, + content_encryption_key, + initialization_vector, + version)} + + return dumps(queue_message) + + +def decrypt_queue_message( + message: str, + response: "PipelineResponse", + require_encryption: bool, + key_encryption_key: Optional[KeyEncryptionKey], + resolver: Optional[Callable[[str], KeyEncryptionKey]] +) -> str: + """ + Returns the decrypted message contents from an EncryptedQueueMessage. + If no encryption metadata is present, will return the unaltered message. + :param str message: + The JSON formatted QueueEncryptedMessage contents with all associated metadata.
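The key-encryption-key objects referenced throughout these helpers only need to expose get_kid, get_key_wrap_algorithm, wrap_key and unwrap_key. A minimal local implementation satisfying that contract, sketched with the cryptography package's AES key wrap (LocalKeyEncryptionKey and its names are illustrative, not part of this module):

    from cryptography.hazmat.primitives.keywrap import aes_key_wrap, aes_key_unwrap

    class LocalKeyEncryptionKey:
        def __init__(self, kek: bytes, kid: str):
            self._kek = kek  # a locally held 16/24/32-byte AES wrapping key
            self._kid = kid

        def wrap_key(self, key: bytes) -> bytes:
            # Wrap a generated content-encryption-key for storage in the metadata.
            return aes_key_wrap(self._kek, key)

        def unwrap_key(self, key: bytes, algorithm: str) -> bytes:
            # 'algorithm' is whatever get_key_wrap_algorithm() reported at encrypt time.
            return aes_key_unwrap(self._kek, key)

        def get_key_wrap_algorithm(self) -> str:
            return "A256KW"

        def get_kid(self) -> str:
            return self._kid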
+ :param Any response: + The pipeline response used to generate an error with. + :param bool require_encryption: + If set, will enforce that the retrieved messages are encrypted and decrypt them. + :param Optional[KeyEncryptionKey] key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key) + - Wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm() + - Returns the algorithm used to wrap the specified symmetric key. + get_kid() + - Returns a string key id for this key-encryption-key. + :param Optional[Callable[[str], KeyEncryptionKey]] resolver: + The user-provided key resolver. Uses the kid string to return a key-encryption-key + implementing the interface defined above. + :return: The plain text message from the queue message. + :rtype: str + """ + response = response.http_response + + try: + deserialized_message: Dict[str, Any] = loads(message) + + encryption_data = _dict_to_encryption_data(deserialized_message['EncryptionData']) + decoded_data = decode_base64_to_bytes(deserialized_message['EncryptedMessageContents']) + except (KeyError, ValueError) as exc: + # Message was not json formatted and so was not encrypted + # or the user provided a json formatted message + # or the metadata was malformed. + if require_encryption: + raise ValueError( + 'Encryption required, but received message does not contain appropriate metadata. ' + \ + 'Message was either not encrypted or metadata was incorrect.') from exc + + return message + try: + return _decrypt_message(decoded_data, encryption_data, key_encryption_key, resolver).decode('utf-8') + except Exception as error: + raise HttpResponseError( + message="Decryption failed.", + response=response, #type: ignore [arg-type] + error=error) from error diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/__init__.py index cc760e7efd22..62dc43a7722a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/__init__.py @@ -7,10 +7,17 @@ # -------------------------------------------------------------------------- from ._azure_blob_storage import AzureBlobStorage -__all__ = ['AzureBlobStorage'] try: - from ._patch import patch_sdk # type: ignore - patch_sdk() + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import except ImportError: - pass + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureBlobStorage", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_azure_blob_storage.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_azure_blob_storage.py index dff7e12276c2..a429b713b744 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_azure_blob_storage.py +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_azure_blob_storage.py @@ -6,35 +6,34 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from copy import deepcopy +from typing import Any +from typing_extensions import Self from azure.core import PipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse +from . import models as _models from ._configuration import AzureBlobStorageConfiguration -from .operations import ServiceOperations -from .operations import ContainerOperations -from .operations import DirectoryOperations -from .operations import BlobOperations -from .operations import PageBlobOperations -from .operations import AppendBlobOperations -from .operations import BlockBlobOperations -from . import models +from ._serialization import Deserializer, Serializer +from .operations import ( + AppendBlobOperations, + BlobOperations, + BlockBlobOperations, + ContainerOperations, + PageBlobOperations, + ServiceOperations, +) -class AzureBlobStorage(object): +class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword """AzureBlobStorage. :ivar service: ServiceOperations operations :vartype service: azure.storage.blob.operations.ServiceOperations :ivar container: ContainerOperations operations :vartype container: azure.storage.blob.operations.ContainerOperations - :ivar directory: DirectoryOperations operations - :vartype directory: azure.storage.blob.operations.DirectoryOperations :ivar blob: BlobOperations operations :vartype blob: azure.storage.blob.operations.BlobOperations :ivar page_blob: PageBlobOperations operations @@ -43,49 +42,78 @@ class AzureBlobStorage(object): :vartype append_blob: azure.storage.blob.operations.AppendBlobOperations :ivar block_blob: BlockBlobOperations operations :vartype block_blob: azure.storage.blob.operations.BlockBlobOperations - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. :type url: str + :param base_url: Service URL. Required. Default value is "". + :type base_url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - base_url = '{url}' - self._config = AzureBlobStorageConfiguration(url, **kwargs) - self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, url: str, base_url: str = "", **kwargs: Any + ) -> None: + self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=base_url, policies=_policies, **kwargs) + + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
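Combined with the context-manager support defined just below, a typical client lifecycle looks like the following (the account URL is illustrative, doctest-style to match the example above):

    >>> with AzureBlobStorage("https://myaccount.blob.core.windows.net") as client:
    ...     request = HttpRequest("GET", "https://myaccount.blob.core.windows.net/?comp=list")
    ...     response = client._send_request(request)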
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - self.container = ContainerOperations( - self._client, self._config, self._serialize, self._deserialize) - self.directory = DirectoryOperations( - self._client, self._config, self._serialize, self._deserialize) - self.blob = BlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.page_blob = PageBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.append_blob = AppendBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.block_blob = BlockBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - - def close(self): - # type: () -> None + def close(self) -> None: self._client.close() - def __enter__(self): - # type: () -> AzureBlobStorage + def __enter__(self) -> Self: self._client.__enter__() return self - def __exit__(self, *exc_details): - # type: (Any) -> None + def __exit__(self, *exc_details: Any) -> None: self._client.__exit__(*exc_details) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_configuration.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_configuration.py index 6c37b2421150..80772c6e3aa2 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_configuration.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_configuration.py @@ -6,53 +6,46 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from typing import Any, Literal -from azure.core.configuration import Configuration from azure.core.pipeline import policies -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any - VERSION = "unknown" -class AzureBlobStorageConfiguration(Configuration): + +class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for AzureBlobStorage. Note that all parameters used to create this instance are saved as instance attributes. - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. :type url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None + def __init__(self, url: str, **kwargs: Any) -> None: + version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + if url is None: raise ValueError("Parameter 'url' must not be None.") - super(AzureBlobStorageConfiguration, self).__init__(**kwargs) self.url = url - self.version = "2020-04-08" - kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self.version = version + kwargs.setdefault("sdk_moniker", "azureblobstorage/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) - def _configure( - self, - **kwargs # type: Any - ): - # type: (...) -> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_patch.py new file mode 100644 index 000000000000..4688ca7f8ac2 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_patch.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md + + +def patch_sdk(): + pass diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_serialization.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_serialization.py new file mode 100644 index 000000000000..01a226bd7f14 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/_serialization.py @@ -0,0 +1,2115 @@ +# pylint: disable=too-many-lines +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + TypeVar, + MutableMapping, + Type, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be loaded at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attempt(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attempt(data) + if success: + return json_result + # If I'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise.
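# As a quick reference for the content-type dispatch implemented above (a sketch
# of the expected behavior, not additional vendored code):
#
#     RawDeserializer.deserialize_from_text('{"value": 1}', "application/json")
#     # -> {'value': 1}
#     RawDeserializer.deserialize_from_text("plain body", "text/plain")
#     # -> 'plain body'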
+ _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0. + + :param datetime.datetime dt: The datetime + :returns: The offset + :rtype: datetime.timedelta + """ + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation. + + :param datetime.datetime dt: The datetime + :returns: The timestamp representation + :rtype: str + """ + return "Z" + + def dst(self, dt): + """No daylight saving for UTC. + + :param datetime.datetime dt: The datetime + :returns: The daylight saving time + :rtype: datetime.timedelta + """ + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset # type: ignore +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. + Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. 
+ + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). 
Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + :rtype: ModelType + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + :rtype: ModelType + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + Remove the polymorphic key from the initial data. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. 
Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. + + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer(object): # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + :returns: The serialized data. 
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. + :returns: The serialized data. 
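Dispatch is driven purely by the metadata type name, including bracketed iterable types (values illustrative):

    >>> import datetime
    >>> Serializer().serialize_data(datetime.date(2024, 5, 1), "date")
    '2024-05-01'
    >>> Serializer().serialize_data([1, 2], "[int]")
    [1, 2]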
+ :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic built-in data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'.
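The div joiner yields the comma-separated form some query parameters expect (values illustrative):

    >>> Serializer().serialize_iter(["a", "b", "c"], "str", div=",")
    'a,b,c'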
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+        :rtype: str
+        :return: serialized time
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_time(attr)
+        t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+        if attr.microsecond:
+            t += ".{:06}".format(attr.microsecond)
+        return t
+
+    @staticmethod
+    def serialize_duration(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize TimeDelta object into ISO-8601 formatted string.
+
+        :param TimeDelta attr: Object to be serialized.
+        :rtype: str
+        :return: serialized duration
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_duration(attr)
+        return isodate.duration_isoformat(attr)
+
+    @staticmethod
+    def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into RFC-1123 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: TypeError if format invalid.
+        :return: serialized rfc
+        """
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+        except AttributeError as exc:
+            raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+            Serializer.days[utc.tm_wday],
+            utc.tm_mday,
+            Serializer.months[utc.tm_mon],
+            utc.tm_year,
+            utc.tm_hour,
+            utc.tm_min,
+            utc.tm_sec,
+        )
+
+    @staticmethod
+    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into ISO-8601 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: SerializationError if format invalid.
+        :return: serialized iso
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_datetime(attr)
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+            if utc.tm_year > 9999 or utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+
+            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+            if microseconds:
+                microseconds = "." + microseconds
+            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+            )
+            return date + microseconds + "Z"
+        except (ValueError, OverflowError) as err:
+            msg = "Unable to serialize datetime object."
+            raise SerializationError(msg) from err
+        except AttributeError as err:
+            msg = "ISO-8601 object must be valid Datetime object."
+            raise TypeError(msg) from err
+
+    @staticmethod
+    def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize Datetime object into IntTime format.
+        This is represented as seconds.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: int
+        :raises: SerializationError if format invalid
+        :return: serialized unix
+        """
+        if isinstance(attr, int):
+            return attr
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            return int(calendar.timegm(attr.utctimetuple()))
+        except AttributeError as exc:
+            raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
+def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        # Need the cast, as for some reason "split" is typed as list[str | Any]
+        dict_keys = cast(List[str], _FLATTEN.split(key))
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = working_data.get(working_key, data)
+        if working_data is None:
+            # If we see None at any point while following the flattened JSON path,
+            # every property below it is None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    return working_data.get(key)
+
+
+def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
+    attr, attr_desc, data
+):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+        if working_data is None:
+            # If we see None at any point while following the flattened JSON path,
+            # every property below it is None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    if working_data:
+        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    This is the case-insensitive version of "last_rest_key_extractor".
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+    return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+    found_key = None
+    lower_attr = attr.lower()
+    for key in data:
+        if lower_attr == key.lower():
+            found_key = key
+            break
+
+    return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+    """Given an internal type XML description, extract correct XML name with namespace.
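+
+    For example (an illustrative sketch; ``HypotheticalBlob`` and its namespace
+    are made-up names, not part of this module)::
+
+        class HypotheticalBlob:
+            _xml_map = {"name": "Blob", "ns": "http://example.com/ns"}
+
+        _extract_name_from_internal_type(HypotheticalBlob)
+        # returns "{http://example.com/ns}Blob"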
+
+    :param dict internal_type: A model type
+    :rtype: str
+    :returns: The XML name, qualified with its namespace when one is defined
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument,too-many-return-statements
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenario where I take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If internal type has a local name and it's not a list, I use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
+        if internal_type:  # Complex type, ignore itemsName and use the complex type name
+            items_name = _extract_name_from_internal_type(internal_type)
+        else:
+            items_name = xml_desc.get("itemsName", xml_name)
+        children = data.findall(items_name)
+
+    if len(children) == 0:
+        if is_iter_type:
+            if is_wrapped:
+                return None  # is_wrapped no node, we want None
+            return []  # not wrapped, assume empty list
+        return None  # Assume it's not there, maybe an optional node.
+
+    # If is_iter_type and not wrapped, return all found children
+    if is_iter_type:
+        if not is_wrapped:
+            return children
+        # Iter and wrapped, should have found one node only (the wrap one)
+        if len(children) != 1:
+            raise DeserializationError(
+                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(  # pylint: disable=line-too-long
+                    xml_name
+                )
+            )
+        return list(children[0])  # Might be empty list and that's ok.
+
+    # Here it's not an iter type, we should have found one element only or none
+    if len(children) > 1:
+        raise DeserializationError("Found several XML '{}' nodes where only one was expected".format(xml_name))
+    return children[0]
+
+
+class Deserializer(object):
+    """Response object model deserializer.
+
+    :param dict classes: Class type dictionary for deserializing complex types.
+    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
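+
+    A minimal usage sketch (illustrative only; ``deserialize_data`` is the
+    generic entry point that the generated operations call internally):
+
+    >>> d = Deserializer()
+    >>> d.deserialize_data("2024-12-20", "date")
+    datetime.date(2024, 12, 20)
+    >>> d.deserialize_data("true", "bool")
+    True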
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
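+
+        For example (a sketch; ``Pet`` is a hypothetical generated model whose
+        ``id`` attribute is marked read-only in ``_validation``)::
+
+            deserializer._instantiate_model(Pet, {"name": "dog", "id": "1"})
+            # returns Pet(name="dog"); the read-only "id" is then set on the
+            # instance with setattr, bypassing the constructor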
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access + ] + const = [ + k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. 
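+
+        For example (illustrative)::
+
+            from enum import Enum
+
+            class Color(Enum):
+                RED = "red"
+
+            Deserializer.deserialize_enum("red", Color)  # Color.RED
+            Deserializer.deserialize_enum("RED", Color)  # Color.RED, via the case-insensitive fallback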
+ + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises: DeserializationError if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. 
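+
+        For example (illustrative)::
+
+            Deserializer.deserialize_date("2024-12-20")  # datetime.date(2024, 12, 20)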
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized date
+        :rtype: Date
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use default month/day. Passing 0 for both ensures an
+        # incomplete date raises an exception instead of being silently filled in.
+        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized time
+        :rtype: datetime.time
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Time must have only digits, ':' and '.'. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized RFC datetime
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized ISO datetime
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()  # type: ignore
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Deserialize a unix timestamp (seconds since epoch) into Datetime object.
+
+        :param int attr: Unix timestamp to be deserialized.
+        :return: Deserialized datetime
+        :rtype: Datetime
+        :raises: DeserializationError if format invalid
+        """
+        if isinstance(attr, ET.Element):
+            attr = int(attr.text)  # type: ignore
+        try:
+            attr = int(attr)
+            date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to unix datetime object."
+ raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/__init__.py index 12cfcf636c47..62dc43a7722a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/__init__.py @@ -7,4 +7,17 @@ # -------------------------------------------------------------------------- from ._azure_blob_storage import AzureBlobStorage -__all__ = ['AzureBlobStorage'] + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureBlobStorage", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py index b53703478e99..9a06e367a4d2 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py @@ -6,31 +6,34 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any +from copy import deepcopy +from typing import Any, Awaitable +from typing_extensions import Self from azure.core import AsyncPipelineClient -from msrest import Deserializer, Serializer +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest +from .. import models as _models +from .._serialization import Deserializer, Serializer from ._configuration import AzureBlobStorageConfiguration -from .operations import ServiceOperations -from .operations import ContainerOperations -from .operations import DirectoryOperations -from .operations import BlobOperations -from .operations import PageBlobOperations -from .operations import AppendBlobOperations -from .operations import BlockBlobOperations -from .. import models +from .operations import ( + AppendBlobOperations, + BlobOperations, + BlockBlobOperations, + ContainerOperations, + PageBlobOperations, + ServiceOperations, +) -class AzureBlobStorage(object): +class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword """AzureBlobStorage. 
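+
+    A minimal usage sketch (illustrative; within this vendored copy the client is
+    normally constructed for you by the checkpoint store, and the account URL is
+    a placeholder)::
+
+        async with AzureBlobStorage(url="https://myaccount.blob.core.windows.net") as client:
+            properties = await client.service.get_properties()
+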
:ivar service: ServiceOperations operations :vartype service: azure.storage.blob.aio.operations.ServiceOperations :ivar container: ContainerOperations operations :vartype container: azure.storage.blob.aio.operations.ContainerOperations - :ivar directory: DirectoryOperations operations - :vartype directory: azure.storage.blob.aio.operations.DirectoryOperations :ivar blob: BlobOperations operations :vartype blob: azure.storage.blob.aio.operations.BlobOperations :ivar page_blob: PageBlobOperations operations @@ -39,45 +42,80 @@ class AzureBlobStorage(object): :vartype append_blob: azure.storage.blob.aio.operations.AppendBlobOperations :ivar block_blob: BlockBlobOperations operations :vartype block_blob: azure.storage.blob.aio.operations.BlockBlobOperations - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. :type url: str + :param base_url: Service URL. Required. Default value is "". + :type base_url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url: str, - **kwargs: Any + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, url: str, base_url: str = "", **kwargs: Any ) -> None: - base_url = '{url}' - self._config = AzureBlobStorageConfiguration(url, **kwargs) - self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs) + self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=base_url, policies=_policies, **kwargs) - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) + + def _send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: 
+ """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - self.container = ContainerOperations( - self._client, self._config, self._serialize, self._deserialize) - self.directory = DirectoryOperations( - self._client, self._config, self._serialize, self._deserialize) - self.blob = BlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.page_blob = PageBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.append_blob = AppendBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.block_blob = BlockBlobOperations( - self._client, self._config, self._serialize, self._deserialize) + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureBlobStorage": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self - async def __aexit__(self, *exc_details) -> None: + async def __aexit__(self, *exc_details: Any) -> None: await self._client.__aexit__(*exc_details) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_configuration.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_configuration.py index 5727357d92f7..5128a4f98b2a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_configuration.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_configuration.py @@ -6,47 +6,46 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any +from typing import Any, Literal -from azure.core.configuration import Configuration from azure.core.pipeline import policies VERSION = "unknown" -class AzureBlobStorageConfiguration(Configuration): + +class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for AzureBlobStorage. Note that all parameters used to create this instance are saved as instance attributes. - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. 
:type url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url: str, - **kwargs: Any - ) -> None: + def __init__(self, url: str, **kwargs: Any) -> None: + version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + if url is None: raise ValueError("Parameter 'url' must not be None.") - super(AzureBlobStorageConfiguration, self).__init__(**kwargs) self.url = url - self.version = "2020-04-08" - kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self.version = version + kwargs.setdefault("sdk_moniker", "azureblobstorage/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_patch.py new file mode 100644 index 000000000000..4688ca7f8ac2 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/_patch.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md + + +def patch_sdk(): + pass diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/__init__.py index 62f85c9290c1..1be05c7aa9a7 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/__init__.py @@ -8,18 +8,22 @@ from ._service_operations import ServiceOperations from ._container_operations import ContainerOperations -from ._directory_operations import DirectoryOperations from ._blob_operations import BlobOperations from ._page_blob_operations import PageBlobOperations from ._append_blob_operations import AppendBlobOperations from ._block_blob_operations import BlockBlobOperations +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'ServiceOperations', - 'ContainerOperations', - 'DirectoryOperations', - 'BlobOperations', - 'PageBlobOperations', - 'AppendBlobOperations', - 'BlockBlobOperations', + "ServiceOperations", + "ContainerOperations", + "BlobOperations", + "PageBlobOperations", + "AppendBlobOperations", + "BlockBlobOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py index 333cb9f0e5aa..e466bdaba168 100644 --- 
a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,62 +7,84 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._append_blob_operations import ( + build_append_block_from_url_request, + build_append_block_request, + build_create_request, + build_seal_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class AppendBlobOperations: - """AppendBlobOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class AppendBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`append_blob` attribute. 
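+
+    For example (illustrative; assumes an ``AzureBlobStorage`` client whose URL
+    points at an append blob)::
+
+        await client.append_blob.create(content_length=0)
+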
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def create( + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements self, content_length: int, timeout: Optional[int] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Create Append Blob operation creates a new append blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -69,38 +92,51 @@ async def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -110,6 +146,7 @@ async def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -117,361 +154,371 @@ async def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = 
blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "AppendBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = 
self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - 
response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def append_block( + @distributed_trace_async + async def append_block( # pylint: disable=inconsistent-return-statements self, content_length: int, - body: IO, + body: IO[bytes], timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, - encryption_algorithm: Optional[str] = "AES256", + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: 
Optional[_models.AppendPositionAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param cpk_info: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. 
:type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _max_size = None _append_position = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "appendblock" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.append_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - 
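Note the switch to case_insensitive_dict for caller-supplied headers and params in append_block above: HTTP header names compare case-insensitively, and the regenerated code preserves that when it pops values such as Content-Type. A short illustration using the real azure.core.utils helper:

    from azure.core.utils import case_insensitive_dict

    headers = case_insensitive_dict({"Content-Type": "application/octet-stream"})
    assert headers["content-type"] == "application/octet-stream"
    print(headers.pop("CONTENT-TYPE"))  # lookups and pops ignore case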
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_append_block_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + 
if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", 
response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - append_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def append_block_from_url( + @distributed_trace_async + async def append_block_from_url( # pylint: disable=inconsistent-return-statements self, source_url: str, content_length: int, source_range: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, - source_contentcrc64: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - encryption_algorithm: Optional[str] = "AES256", + transactional_content_md5: Optional[bytes] = None, request_id_parameter: Optional[str] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob where the contents are read from a source url. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param source_url: Specify a URL to the copy source. + :param source_url: Specify a URL to the copy source. Required. :type source_url: str - :param content_length: The length of the request. - :type content_length: long - :param source_range: Bytes of source data in the specified range. + :param content_length: The length of the request. Required. + :type content_length: int + :param source_range: Bytes of source data in the specified range. Default value is None. :type source_range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. Default value is None. 
+ :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + by the service. Default value is None. + :type transactional_content_md5: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param modified_access_conditions: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
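append_block_from_url gains copy_source_authorization, whose docstring above says only the Bearer scheme is supported. If I read the generated request builders correctly this travels as the x-ms-copy-source-authorization header (an assumption worth verifying against build_append_block_from_url_request); shaping it would look like:

    def copy_source_auth(access_token: str) -> dict:
        # Assumed header name; only the Bearer scheme is accepted per the docstring.
        return {"x-ms-copy-source-authorization": f"Bearer {access_token}"}

    print(copy_source_auth("<oauth-access-token>"))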
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _max_size = None @@ -485,225 +532,207 @@ async def append_block_from_url( _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None - if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size - _append_position = append_position_access_conditions.append_position if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "appendblock" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_append_block_from_url_request( + url=self._config.url, + source_url=source_url, + content_length=content_length, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + 
encryption_scope=_encryption_scope, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.append_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if 
_if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - append_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def seal( + @distributed_trace_async + async def seal( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version 2019-12-12 version or later. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param append_position_access_conditions: Parameter group. 
- :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _append_position = None - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match - comp = "seal" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + + _request = build_seal_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + append_position=_append_position, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.seal.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = 
self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) if cls: - return cls(pipeline_response, None, response_headers) - - seal.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py index 687bcd30dad6..94f1b103bd71 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,40 +7,80 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, AsyncIterator, Callable, Dict, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._blob_operations import ( + build_abort_copy_from_url_request, + build_acquire_lease_request, + build_break_lease_request, + build_change_lease_request, + build_copy_from_url_request, + build_create_snapshot_request, + build_delete_immutability_policy_request, + build_delete_request, + build_download_request, + build_get_account_info_request, + build_get_properties_request, + build_get_tags_request, + build_query_request, + build_release_lease_request, + build_renew_lease_request, + build_set_expiry_request, + build_set_http_headers_request, + build_set_immutability_policy_request, + build_set_legal_hold_request, + build_set_metadata_request, + build_set_tags_request, + build_set_tier_request, + build_start_copy_from_url_request, + build_undelete_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class BlobOperations: - """BlobOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class BlobOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. 
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`blob` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace_async async def download( self, snapshot: Optional[str] = None, @@ -48,469 +89,596 @@ async def download( range: Optional[str] = None, range_get_content_md5: Optional[bool] = None, range_get_content_crc64: Optional[bool] = None, - encryption_algorithm: Optional[str] = "AES256", + structured_body_type: Optional[str] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> IO: + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long """The Download operation reads or downloads a blob from the system, including its metadata and properties. You can also call Download to read a snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param range_get_content_md5: When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB - in size. + in size. Default value is None. :type range_get_content_md5: bool :param range_get_content_crc64: When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 - MB in size. + MB in size. Default value is None. :type range_get_content_crc64: bool - :param encryption_algorithm: The algorithm used to produce the encryption key hash. 
Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + :param structured_body_type: Specifies the response content should be returned as a structured + message and specifies the message schema version and properties. Default value is None. + :type structured_body_type: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_download_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + range=range, + lease_id=_lease_id, + range_get_content_md5=range_get_content_md5, + 
range_get_content_crc64=range_get_content_crc64, + structured_body_type=structured_body_type, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.download.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if range_get_content_md5 is not None: - header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("range_get_content_md5", range_get_content_md5, 'bool') - if range_get_content_crc64 is not None: - header_parameters['x-ms-range-get-content-crc64'] = self._serialize.header("range_get_content_crc64", range_get_content_crc64, 'bool') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} if response.status_code == 200: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', 
response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + 
response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - 
response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', 
response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - download.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - async def get_properties( + @distributed_trace_async + async def get_properties( # pylint: disable=inconsistent-return-statements self, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. 
:param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_properties_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', 
minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', 
response.headers.get('x-ms-copy-status')) - response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy')) - response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier')) - response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred')) - response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status')) - response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', 
response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-incremental-copy"] = self._deserialize( + "bool", response.headers.get("x-ms-incremental-copy") + ) + response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( + "str", response.headers.get("x-ms-copy-destination-snapshot") + ) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) + response_headers["x-ms-access-tier-inferred"] = self._deserialize( + "bool", response.headers.get("x-ms-access-tier-inferred") + ) + response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) + response_headers["x-ms-access-tier-change-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-access-tier-change-time") + ) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-rehydrate-priority"] = self._deserialize( + "str", response.headers.get("x-ms-rehydrate-priority") + ) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - get_properties.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def delete( + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements self, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, - delete_snapshots: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, request_id_parameter: Optional[str] = None, - blob_delete_type: Optional[str] = "Permanent", - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + blob_delete_type: Literal["Permanent"] = "Permanent", + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long 
"""If the storage account's soft delete feature is disabled then, when a blob is deleted, it is permanently removed from the storage account. If the storage account's soft delete feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible immediately. However, the blob service retains the blob or snapshot for the number of days - specified by the DeleteRetentionPolicy section of [Storage service properties] (Set-Blob- - Service-Properties.md). After the specified number of days has passed, the blob's data is - permanently removed from the storage account. Note that you continue to be charged for the - soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify - the "include=deleted" query parameter to discover which blobs and snapshots have been soft - deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the "include=deleted" query parameter to discover which blobs and snapshots have been + soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 (ResourceNotFound). :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the following two options: include: Delete the base blob and all of its snapshots. only: Delete - only the blob's snapshots and not the blob itself. + only the blob's snapshots and not the blob itself. Known values are: "include" and "only". + Default value is None. :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to - permanently delete a blob if blob soft delete is enabled. + permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and + None. Default value is "Permanent". :type blob_delete_type: str - :param lease_access_conditions: Parameter group. 
+ :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -520,793 +688,634 @@ async def delete( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_delete_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + delete_snapshots=delete_snapshots, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_delete_type=blob_delete_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if blob_delete_type is not None: - query_parameters['deletetype'] = self._serialize.query("blob_delete_type", blob_delete_type, 'str') - - # 
Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if delete_snapshots is not None: - header_parameters['x-ms-delete-snapshots'] = self._serialize.header("delete_snapshots", delete_snapshots, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_access_control( - self, - timeout: Optional[int] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - posix_permissions: Optional[str] = None, - posix_acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + @distributed_trace_async + async def undelete( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, 
request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - """Set the owner, group, permissions, or access control list for a blob. + # pylint: disable=line-too-long + """Undelete a blob that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param owner: Optional. The owner of the blob or directory. - :type owner: str - :param group: Optional. The owning group of the blob or directory. - :type group: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". - :type posix_acl: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
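A matching sketch for the `undelete` operation introduced here, again with `blob_ops` as an assumed instance of the same class:

```python
async def restore_blob(blob_ops) -> None:
    # Undelete only succeeds while the blob is still retained under the
    # account's DeleteRetentionPolicy window.
    await blob_ops.undelete(timeout=30)
```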
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "setAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_undelete_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if owner is not None: - header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str') - if group is not None: - header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_acl is not None: - header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.patch(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def get_access_control( + @distributed_trace_async + async def set_expiry( # pylint: disable=inconsistent-return-statements self, + expiry_options: Union[str, _models.BlobExpiryOptions], timeout: Optional[int] = None, - upn: Optional[bool] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + expires_on: Optional[str] = None, + **kwargs: Any ) -> None: - """Get the owner, group, permissions, or access control list for a blob. + # pylint: disable=line-too-long + """Sets the time a blob will expire and be deleted. + :param expiry_options: Required. Indicates mode of the expiry time. Known values are: + "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. + :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. 
+ :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If - "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. - :type upn: bool :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param expires_on: The time to set the blob to expiry. Default value is None. + :type expires_on: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "getAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_expiry_request( + url=self._config.url, + expiry_options=expiry_options, + timeout=timeout, + request_id_parameter=request_id_parameter, + expires_on=expires_on, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = 
self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if upn is not None: - query_parameters['upn'] = self._serialize.query("upn", upn, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner')) - response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group')) - response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions')) - response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - get_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def rename( + @distributed_trace_async + async def set_http_headers( # pylint: disable=inconsistent-return-statements self, - rename_source: str, timeout: Optional[int] = None, - path_rename_mode: Optional[Union[str, "_models.PathRenameMode"]] = None, - directory_properties: Optional[str] = None, - posix_permissions: Optional[str] = None, - posix_umask: Optional[str] = None, - source_lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, - directory_http_headers: Optional["_models.DirectoryHttpHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: - """Rename a blob/file. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param rename_source: The file or directory to be renamed. The value must have the following - format: "/{filesysystem}/{path}". If "x-ms-properties" is specified, the properties will - overwrite the existing properties; otherwise, the existing properties will be preserved. - :type rename_source: str + # pylint: disable=line-too-long + """The Set HTTP Headers operation sets system properties on the blob. + :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param path_rename_mode: Determines the behavior of the rename operation. - :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. 
Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. - :type posix_umask: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. - :type source_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param directory_http_headers: Parameter group. - :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_md5 = None + _blob_content_encoding = None + _blob_content_language = None _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding = directory_http_headers.content_encoding - _content_language = directory_http_headers.content_language - _content_disposition = directory_http_headers.content_disposition + _if_tags = None + _blob_content_disposition = None + if 
blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_type = blob_http_headers.blob_content_type if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - accept = "application/xml" + _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_http_headers_request( + url=self._config.url, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_md5=_blob_content_md5, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_content_disposition=_blob_content_disposition, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.rename.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if path_rename_mode is not None: - query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str') - if directory_properties is not None: - header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_umask is not None: - header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') - if _cache_control is not None: - header_parameters['x-ms-cache-control'] = 
self._serialize.header("cache_control", _cache_control, 'str') - if _content_type is not None: - header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str') - if _content_encoding is not None: - header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') - if _content_language is not None: - header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') - if _content_disposition is not None: - header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if source_lease_id is not None: - header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [201]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', 
response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - rename.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - async def undelete( + @distributed_trace_async + async def set_immutability_policy( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: - """Undelete a blob that was previously soft deleted. + # pylint: disable=line-too-long + """The Set Immutability Policy operation sets the immutability policy on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. 
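A sketch of `set_immutability_policy` as regenerated here; the mode strings are the known values from the docstring, and the expiry is a timezone-aware `datetime` per the signature.

```python
import datetime

async def lock_blob(blob_ops) -> None:
    await blob_ops.set_immutability_policy(
        immutability_policy_expiry=(
            datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=7)
        ),
        immutability_policy_mode="Unlocked",  # or "Mutable" / "Locked"
    )
```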
+ :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.undelete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + if_unmodified_since=_if_unmodified_since, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = 
self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) if cls: - return cls(pipeline_response, None, response_headers) - - undelete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_expiry( + @distributed_trace_async + async def delete_immutability_policy( # pylint: disable=inconsistent-return-statements self, - expiry_options: Union[str, "_models.BlobExpiryOptions"], timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any ) -> None: - """Sets the time a blob will expire and be deleted. + # pylint: disable=line-too-long + """The Delete Immutability Policy operation deletes the immutability policy on the blob. - :param expiry_options: Required. Indicates mode of the expiry time. - :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param expires_on: The time to set the blob to expiry. - :type expires_on: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. 
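And its inverse, `delete_immutability_policy`, sketched with the optional version targeting this signature adds; the `version_id` value is illustrative only.

```python
async def unlock_blob(blob_ops) -> None:
    # An "Unlocked" policy can be deleted; a "Locked" one cannot.
    await blob_ops.delete_immutability_policy(
        version_id="2031-01-01T00:00:00.0000000Z",  # illustrative version id
    )
```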
+ :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "expiry" - accept = "application/xml" - - # Construct URL - url = self.set_expiry.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') - if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_delete_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_expiry.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_http_headers( + @distributed_trace_async + async def set_legal_hold( # pylint: disable=inconsistent-return-statements self, + legal_hold: bool, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any ) -> None: - """The Set HTTP Headers operation sets system properties on the blob. + # pylint: disable=line-too-long + """The Set Legal Hold operation sets a legal hold on the blob. + :param legal_hold: Specified if a legal hold should be set on the blob. Required. + :type legal_hold: bool :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_http_headers: Parameter group. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. 
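Finally, a sketch of `set_legal_hold`, whose only required parameter is the boolean flag:

```python
async def toggle_legal_hold(blob_ops) -> None:
    await blob_ops.set_legal_hold(legal_hold=True)   # place the hold
    await blob_ops.set_legal_hold(legal_hold=False)  # later: clear it
```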
+ :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_md5 = None - _blob_content_encoding = None - _blob_content_language = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _blob_content_disposition = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_disposition = blob_http_headers.blob_content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_legal_hold_request( + url=self._config.url, + legal_hold=legal_hold, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_http_headers.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = 
self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_http_headers.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def set_metadata( + @distributed_trace_async + async def set_metadata( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more name-value pairs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -1314,681 +1323,663 @@ async def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "metadata" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_metadata_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if 
timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', 
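The visible API change in `set_metadata` is the `metadata` parameter: previously a pre-serialized string sent as a single `x-ms-meta` header, it is now a `Dict[str, str]` that the request builder expands into one `x-ms-meta-<name>` header per pair, and `encryption_algorithm` now travels inside the `CpkInfo` parameter group. A hypothetical caller-side sketch through the public `azure-storage-blob` async API, which this vendored client mirrors; the connection string, container, and blob names are placeholders:

```python
import asyncio

from azure.storage.blob.aio import BlobClient


async def main() -> None:
    async with BlobClient.from_connection_string(
        "<storage-connection-string>",          # placeholder
        container_name="eventhub-checkpoints",  # placeholder
        blob_name="ownership/partition-0",      # placeholder
    ) as blob:
        # Each pair becomes an x-ms-meta-<name> header on the wire.
        await blob.set_blob_metadata({"ownerid": "consumer-1", "epoch": "5"})


asyncio.run(main())
```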
-    async def acquire_lease(
+    @distributed_trace_async
+    async def acquire_lease(  # pylint: disable=inconsistent-return-statements
         self,
         timeout: Optional[int] = None,
         duration: Optional[int] = None,
         proposed_lease_id: Optional[str] = None,
         request_id_parameter: Optional[str] = None,
-        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
-        **kwargs
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
     ) -> None:
+        # pylint: disable=line-too-long
         """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and
         delete operations.

         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a
         lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease
-         duration cannot be changed using renew or change.
+         duration cannot be changed using renew or change. Default value is None.
         :type duration: int
         :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns
         400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
-         Constructor (String) for a list of valid GUID string formats.
+         Constructor (String) for a list of valid GUID string formats. Default value is None.
         :type proposed_lease_id: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param modified_access_conditions: Parameter group.
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+        action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _if_modified_since = None
         _if_unmodified_since = None
         _if_match = None
         _if_none_match = None
         _if_tags = None
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        comp = "lease"
-        action = "acquire"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.acquire_lease.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str')
-        if duration is not None:
-            header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int')
-        if proposed_lease_id is not None:
-            header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_acquire_lease_request(
+            url=self._config.url,
+            timeout=timeout,
+            duration=duration,
+            proposed_lease_id=proposed_lease_id,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            comp=comp,
+            action=action,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    acquire_lease.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
-
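As with `comp`, the `action: Literal["acquire"]` kwarg pins the `x-ms-lease-action` header for this operation while still letting a caller override it through the new `headers`/`params` pass-throughs. At the convenience layer this operation backs `BlobClient.acquire_lease`; a brief sketch, assuming an already-constructed public async client:

```python
from azure.storage.blob.aio import BlobClient


async def take_ownership(blob: BlobClient) -> str:
    # A finite lease must be 15-60 seconds; -1 requests an infinite lease.
    lease = await blob.acquire_lease(lease_duration=15)
    return lease.id  # GUID to present on subsequent writes
```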
self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def release_lease( + @distributed_trace_async + async def release_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = 
None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: 
Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + 
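A typical reason to acquire and then release a blob lease is to fence a read-modify-write cycle, for example on a checkpoint blob. A hedged sketch of that lifecycle with the public async API; the payload and duration are illustrative only:

```python
from azure.storage.blob.aio import BlobClient


async def update_under_lease(blob: BlobClient, payload: bytes) -> None:
    lease = await blob.acquire_lease(lease_duration=15)
    try:
        # Writes against a leased blob must present the lease ID.
        await blob.upload_blob(payload, overwrite=True, lease=lease)
    finally:
        # Releasing ends the lease immediately so other writers can proceed.
        await lease.release()
```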
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def renew_lease( + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 
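Renewing is the keep-alive half of the lease protocol: it restarts the clock on the originally acquired duration but, per the docstring above, cannot change that duration. A one-line sketch with the public async `BlobLeaseClient`, assuming a lease already held:

```python
from azure.storage.blob.aio import BlobLeaseClient


async def keep_alive(lease: BlobLeaseClient) -> None:
    # Restarts the original duration; renewing cannot lengthen or shorten it.
    await lease.renew()
```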
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_renew_lease_request(
+            url=self._config.url,
+            lease_id=lease_id,
+            timeout=timeout,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            comp=comp,
+            action=action,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    renew_lease.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    async def change_lease(
+    @distributed_trace_async
+    async def change_lease(  # pylint: disable=inconsistent-return-statements
         self,
         lease_id: str,
         proposed_lease_id: str,
         timeout: Optional[int] = None,
         request_id_parameter: Optional[str] = None,
-        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
-        **kwargs
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
     ) -> None:
+        # pylint: disable=line-too-long
         """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and
         delete operations.

-        :param lease_id: Specifies the current lease ID on the resource.
+        :param lease_id: Specifies the current lease ID on the resource. Required.
         :type lease_id: str
         :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns
         400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
-         Constructor (String) for a list of valid GUID string formats.
+         Constructor (String) for a list of valid GUID string formats. Required.
         :type proposed_lease_id: str
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param modified_access_conditions: Parameter group.
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+        action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _if_modified_since = None
         _if_unmodified_since = None
         _if_match = None
         _if_none_match = None
         _if_tags = None
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        comp = "lease"
-        action = "change"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.change_lease.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str')
-        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
-        header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_change_lease_request(
+            url=self._config.url,
+            lease_id=lease_id,
+            proposed_lease_id=proposed_lease_id,
+            timeout=timeout,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            comp=comp,
+            action=action,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - change_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def break_lease( + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, break_period: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a fixed- - duration lease breaks after the remaining lease period elapses, and an infinite lease breaks - immediately. + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. :type break_period: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "break" - accept = "application/xml" - - # Construct URL - url = self.break_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if break_period is not None: - header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = 
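Breaking differs from releasing in that it does not require the lease ID and can leave a grace period before the lease actually ends. A sketch using the public API's `break_lease`, which returns the number of seconds remaining before the break completes:

```python
from azure.storage.blob.aio import BlobLeaseClient


async def evict_owner(lease: BlobLeaseClient) -> int:
    # lease_break_period=0 breaks immediately; a value between 0 and 60
    # lets the current holder keep the lease for up to that many seconds.
    return await lease.break_lease(lease_break_period=0)
```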
self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_break_lease_request( + url=self._config.url, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - break_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def create_snapshot( + @distributed_trace_async + async def create_snapshot( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: 
Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Create Snapshot operation creates a read-only snapshot of a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -1996,36 +1987,41 @@ async def create_snapshot( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -2034,113 +2030,103 @@ async def create_snapshot( _if_tags = None _lease_id = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "snapshot" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_create_snapshot_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create_snapshot.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 
'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-request-server-encrypted')) + response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - create_snapshot.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def start_copy_from_url( + @distributed_trace_async + async def start_copy_from_url( # pylint: disable=inconsistent-return-statements self, copy_source: str, timeout: Optional[int] = None, - metadata: Optional[str] = None, - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, - rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, seal_blob: Optional[bool] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Start Copy From URL operation copies a blob or an internet resource to a new blob. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. 
If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2148,38 +2134,57 @@ async def start_copy_from_url( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. + blob. Known values are: "High" and "Standard". Default value is None. :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str :param seal_blob: Overrides the sealed state of the destination blob. Service version - 2019-12-12 and newer. + 2019-12-12 and newer. Default value is None. :type seal_blob: bool - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None.
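A minimal sketch of the asynchronous copy path via the public aio BlobClient, which drives this generated operation; the source URL and connection string below are placeholders. The copy is accepted with 202 and completes in the background, so callers poll the copy status:

    import asyncio

    from azure.storage.blob.aio import BlobClient

    async def main():
        dest = BlobClient.from_connection_string(
            "<connection-string>", container_name="my-container", blob_name="copied-blob"
        )
        async with dest:
            # Placeholder source URL; it must be public or carry a SAS, per the docstring.
            copy = await dest.start_copy_from_url(
                "https://<account>.blob.core.windows.net/src/source-blob"
            )
            print(copy["copy_id"], copy["copy_status"])  # typically 'pending' at first
            props = await dest.get_blob_properties()
            print(props.copy.status)  # poll until it reads 'success'

    asyncio.run(main())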
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _source_if_modified_since = None _source_if_unmodified_since = None _source_if_match = None @@ -2191,124 +2196,113 @@ async def start_copy_from_url( _if_none_match = None _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if modified_access_conditions is not None: + _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_start_copy_from_url_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + rehydrate_priority=rehydrate_priority, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + seal_blob=seal_blob, + immutability_policy_expiry=immutability_policy_expiry, + 
immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.start_copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _source_if_tags is not None: - header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - if seal_blob is not None: - header_parameters['x-ms-seal-blob'] = 
self._serialize.header("seal_blob", seal_blob, 'bool') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) if cls: - return cls(pipeline_response, None, response_headers) - - start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def copy_from_url( + @distributed_trace_async + async def copy_from_url( # pylint: disable=inconsistent-return-statements self, copy_source: str, timeout: Optional[int] = None, - metadata: Optional[str] = None, - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, blob_tags_string: Optional[str] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: 
Optional["_models.LeaseAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return a response until the copy is complete. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2316,35 +2310,66 @@ async def copy_from_url( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + from the copy source. Default value is None. + :type source_content_md5: bytes + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. 
+ :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and + "COPY". Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param cpk_scope_info: Parameter group. Default value is None.
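Unlike Start Copy From URL, this operation pins x-ms-requires-sync: true and does not respond until the copy finishes. On the public aio client that path is reached, to the best of my knowledge, with the requires_sync keyword; a sketch under that assumption, with placeholder names:

    import asyncio

    from azure.storage.blob.aio import BlobClient

    async def main():
        dest = BlobClient.from_connection_string(
            "<connection-string>", container_name="my-container", blob_name="sync-copied-blob"
        )
        async with dest:
            # requires_sync=True selects the synchronous Copy From URL operation,
            # so the service does not respond until the copy is complete.
            copy = await dest.start_copy_from_url(
                "https://<account>.blob.core.windows.net/src/source-blob",
                requires_sync=True,
            )
            print(copy["copy_status"])  # 'success' once the synchronous copy returns

    asyncio.run(main())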
+ :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + x_ms_requires_sync: Literal["true"] = kwargs.pop( + "x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + _source_if_modified_since = None _source_if_unmodified_since = None _source_if_match = None @@ -2355,781 +2380,829 @@ async def copy_from_url( _if_none_match = None _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id + _encryption_scope = None + if source_modified_access_conditions is not None: + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - x_ms_requires_sync = "true" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + + _request = build_copy_from_url_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + 
immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + copy_source_authorization=copy_source_authorization, + encryption_scope=_encryption_scope, + copy_source_tags=copy_source_tags, + x_ms_requires_sync=x_ms_requires_sync, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-requires-sync'] = self._serialize.header("x_ms_requires_sync", x_ms_requires_sync, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = 
self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def abort_copy_from_url( + @distributed_trace_async + async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements self, copy_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - 
lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination blob with zero length and full metadata. :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy - Blob operation. + Blob operation. Required. :type copy_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) + copy_action_abort_constant: Literal["abort"] = kwargs.pop( + "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - comp = "copy" - copy_action_abort_constant = "abort" - accept = "application/xml" - - # Construct URL - url = self.abort_copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['copyid'] = self._serialize.query("copy_id", copy_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-action'] = self._serialize.header("copy_action_abort_constant", copy_action_abort_constant, 'str') - if _lease_id is not None: - 
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_abort_copy_from_url_request( + url=self._config.url, + copy_id=copy_id, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + copy_action_abort_constant=copy_action_abort_constant, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def set_tier( + @distributed_trace_async + async def set_tier( # pylint: disable=inconsistent-return-statements self, - tier: Union[str, "_models.AccessTierRequired"], + tier: Union[str, _models.AccessTierRequired], snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, - rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Set Tier operation sets the tier on a 
blob. The operation is allowed on a page blob in a premium storage account and on a block blob in a blob storage account (locally redundant storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. - :param tier: Indicates the tier to be set on the blob. + :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". + Required. :type tier: str or ~azure.storage.blob.models.AccessTierRequired :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. + blob. Known values are: "High" and "Standard". Default value is None. :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
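The public aio client exposes this operation as set_standard_blob_tier (with set_premium_page_blob_tier covering premium accounts); a minimal sketch with placeholder names, archiving a blob and then rehydrating it with the priority described above:

    import asyncio

    from azure.storage.blob.aio import BlobClient

    async def main():
        blob = BlobClient.from_connection_string(
            "<connection-string>", container_name="my-container", blob_name="my-blob"
        )
        async with blob:
            await blob.set_standard_blob_tier("Archive")
            # Moving out of Archive triggers rehydration; the priority maps to
            # the x-ms-rehydrate-priority header ("High" or "Standard").
            await blob.set_standard_blob_tier("Hot", rehydrate_priority="High")

    asyncio.run(main())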
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tier" - accept = "application/xml" - # Construct URL - url = self.set_tier.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_set_tier_request( + url=self._config.url, + tier=tier, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_tags=_if_tags, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + 
_request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - - if response.status_code == 202: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_tier.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def get_account_info( - self, - **kwargs + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
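On the public clients this operation surfaces as get_account_information(), available on BlobServiceClient, ContainerClient, and BlobClient alike; a minimal sketch with a placeholder connection string:

    import asyncio

    from azure.storage.blob.aio import BlobServiceClient

    async def main():
        service = BlobServiceClient.from_connection_string("<connection-string>")
        async with service:
            # Newer service versions also report x-ms-is-hns-enabled, which the
            # patched operation above now deserializes alongside these headers.
            info = await service.get_account_information()
            print(info["sku_name"], info["account_kind"])

    asyncio.run(main())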
+ :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers["x-ms-client-request-id"] = 
self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def query( self, snapshot: Optional[str] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - query_request: Optional["_models.QueryRequest"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> IO: + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + query_request: Optional[_models.QueryRequest] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long """The Query operation enables users to select/project on blob data by providing simple query expressions. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param query_request: the query request. - :type query_request: ~azure.storage.blob.models.QueryRequest - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. 
+ :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :param query_request: the query request. Default value is None. + :type query_request: ~azure.storage.blob.models.QueryRequest + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "query" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.query.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 
'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + _if_unmodified_since = modified_access_conditions.if_unmodified_since if query_request is not None: - body_content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + _content = self._serialize.body(query_request, "QueryRequest", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + _content = None + + _request = build_query_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} if response.status_code == 200: - 
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = response.stream_download(self._client._pipeline) + 
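# Note on the rewritten block that follows: each typed response header is now deserialized
# on its own formatted line ("long" becomes "int", and "x-ms-meta" is read as a "{str}"
# dictionary rather than a plain string), and the streaming body is wired up afterwards via
# ``stream_download`` with the pipeline's decompress setting instead of unconditionally.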
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", 
response.headers.get("x-ms-blob-content-md5") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - 
response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + 
response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - query.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def get_tags( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, snapshot: Optional[str] = None, version_id: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs - ) -> "_models.BlobTags": + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> _models.BlobTags: + # pylint: disable=line-too-long """The Get Tags operation enables users to get the tags associated with a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlobTags, or the result of cls(response) + :return: BlobTags or the result of cls(response) :rtype: ~azure.storage.blob.models.BlobTags - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - accept = "application/xml" + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_get_tags_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlobTags', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlobTags", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - async def set_tags( + @distributed_trace_async + async def set_tags( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, version_id: Optional[str] = None, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, request_id_parameter: Optional[str] = None, - tags: Optional["_models.BlobTags"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + tags: Optional[_models.BlobTags] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Set Tags operation enables users to set tags on a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. 
+ :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param tags: Blob tags. - :type tags: ~azure.storage.blob.models.BlobTags - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param tags: Blob tags. Default value is None. + :type tags: ~azure.storage.blob.models.BlobTags + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if 
transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if tags is not None: - body_content = self._serialize.body(tags, 'BlobTags', is_xml=True) + _content = self._serialize.body(tags, "BlobTags", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_tags_request( + url=self._config.url, + timeout=timeout, + version_id=version_id, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + request_id_parameter=request_id_parameter, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return 
cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py index 67c90b0bf9d4..45419c03cd47 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,115 +7,168 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._block_blob_operations import ( + build_commit_block_list_request, + build_get_block_list_request, + build_put_blob_from_url_request, + build_stage_block_from_url_request, + build_stage_block_request, + build_upload_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class BlockBlobOperations: - """BlockBlobOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class BlockBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`block_blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def upload( + @distributed_trace_async + async def upload( # pylint: disable=inconsistent-return-statements self, content_length: int, - body: IO, + body: IO[bytes], timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + transactional_content_md5: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[bytes] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Upload Block Blob operation updates the content of an existing block blob. Updating an existing block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a partial update of the content of a block blob, use the Put Block List operation. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param metadata: Optional. 
Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytes + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. 
:type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -124,6 +178,7 @@ async def upload( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -131,209 +186,215 @@ async def upload( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "BlockBlob" - content_type = kwargs.pop("content_type", 
"application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.upload.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - 
header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_upload_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + transactional_content_crc64=transactional_content_crc64, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + blob_type=blob_type, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - upload.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def put_blob_from_url( + @distributed_trace_async + async def put_blob_from_url( # pylint: disable=inconsistent-return-statements self, content_length: int, copy_source: str, timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + transactional_content_md5: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, blob_tags_string: Optional[str] = None, copy_source_blob_properties: Optional[bool] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + copy_source_authorization: Optional[str] = None, + copy_source_tags: 
Optional[Union[str, _models.BlobCopySourceTags]] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are not supported with Put Blob from URL; the content of an existing blob is overwritten with the content of the new blob. To perform partial updates to a block blob’s contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. 
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + from the copy source. Default value is None. + :type source_content_md5: bytes + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str :param copy_source_blob_properties: Optional, default is true. Indicates if properties from - the source blob should be copied. + the source blob should be copied. Default value is None. :type copy_source_blob_properties: bool - :param blob_http_headers: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and + "COPY". Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
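# A minimal usage sketch for the Put Blob from URL operation documented above,
# assuming the vendored BlobClient mirrors azure-storage-blob's
# upload_blob_from_url helper. The source URL must be readable by the service
# (public or carrying a SAS); all values below are placeholders.
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob.aio import BlobClient

async def put_from_url_example(dest: BlobClient) -> None:
    source_url = "https://<account>.blob.core.windows.net/src/source.txt?<sas-token>"
    # Overwrites the destination block blob with the source contents in one
    # call; this operation does not support partial updates.
    await dest.upload_blob_from_url(source_url, overwrite=True)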
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -343,6 +404,7 @@ async def put_blob_from_url( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -355,343 +417,344 @@ async def put_blob_from_url( _source_if_none_match = None _source_if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags - blob_type = "BlockBlob" - 
accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_put_blob_from_url_request( + url=self._config.url, + content_length=content_length, + copy_source=copy_source, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.put_blob_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - 
header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _source_if_tags is not None: - header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if copy_source_blob_properties is not None: - header_parameters['x-ms-copy-source-blob-properties'] = self._serialize.header("copy_source_blob_properties", copy_source_blob_properties, 'bool') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - put_blob_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def stage_block( + @distributed_trace_async + async def stage_block( # pylint: disable=inconsistent-return-statements self, block_id: str, content_length: int, - body: IO, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, + body: 
IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - **kwargs + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the string must be less than or equal to 64 bytes in size. For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. + value specified for the blockid parameter must be the same size for each block. Required. :type block_id: str - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. 
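# A minimal usage sketch for the Stage Block operation documented above,
# assuming the vendored async BlobClient. Block IDs must be Base64 strings of
# equal encoded length within a blob; a staged block stays uncommitted until a
# later Put Block List names it.
import base64
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob.aio import BlobClient

async def stage_block_example(blob: BlobClient) -> None:
    block_id = base64.b64encode(b"block-0000").decode("utf-8")
    await blob.stage_block(block_id=block_id, data=b"chunk of data")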
+ :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - comp = "block" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.stage_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if 
_encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _content = body + + _request = build_stage_block_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', 
response.headers.get('x-ms-encryption-scope')) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - stage_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def stage_block_from_url( + @distributed_trace_async + async def stage_block_from_url( # pylint: disable=inconsistent-return-statements self, block_id: str, content_length: int, source_url: str, source_range: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, - source_contentcrc64: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob where the contents are read from a URL. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the string must be less than or equal to 64 bytes in size. For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. + value specified for the blockid parameter must be the same size for each block. Required. :type block_id: str - :param content_length: The length of the request. - :type content_length: long - :param source_url: Specify a URL to the copy source. + :param content_length: The length of the request. Required. + :type content_length: int + :param source_url: Specify a URL to the copy source. Required. :type source_url: str - :param source_range: Bytes of source data in the specified range. 
+ :param source_range: Bytes of source data in the specified range. Default value is None. :type source_range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. Default value is None. + :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
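# A minimal usage sketch for the Stage Block from URL operation documented
# above, assuming the vendored client mirrors azure-storage-blob's
# stage_block_from_url. The source URL must be accessible to the service;
# source_offset and source_length select the byte range to read. Placeholders only.
import base64
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob.aio import BlobClient

async def stage_from_url_example(blob: BlobClient, source_url: str) -> None:
    block_id = base64.b64encode(b"remote-0000").decode("utf-8")
    await blob.stage_block_from_url(
        block_id=block_id,
        source_url=source_url,
        source_offset=0,
        source_length=4 * 1024 * 1024,  # stage the first 4 MiB of the source
    )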
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _source_if_modified_since = None @@ -699,6 +762,7 @@ async def stage_block_from_url( _source_if_match = None _source_if_none_match = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: @@ -706,103 +770,96 @@ async def stage_block_from_url( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "block" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_stage_block_from_url_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + source_url=source_url, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.stage_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', 
response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def commit_block_list( + @distributed_trace_async + async def commit_block_list( # pylint: disable=inconsistent-return-statements self, - blocks: "_models.BlockLookupList", + blocks: _models.BlockLookupList, timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = 
None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. In order to be written as part of a blob, a block must have been successfully written to the server in a prior Put Block operation. You can call Put Block List to update a blob by @@ -811,58 +868,75 @@ async def commit_block_list( or from the uncommitted block list, or to commit the most recently uploaded version of the block, whichever list it may belong to. - :param blocks: + :param blocks: Blob Blocks. Required. :type blocks: ~azure.storage.blob.models.BlockLookupList :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. 
:type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_cache_control = None _blob_content_type = None _blob_content_encoding = None @@ -872,6 +946,7 @@ async def commit_block_list( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -880,224 +955,210 @@ async def commit_block_list( _if_tags = None if blob_http_headers is not None: _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_disposition = 
blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.commit_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", 
_encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = self._serialize.body(blocks, "BlockLookupList", is_xml=True) + + _request = build_commit_block_list_request( + url=self._config.url, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - commit_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def get_block_list( self, snapshot: Optional[str] = None, - list_type: Union[str, "_models.BlockListType"] = "committed", + list_type: 
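# --------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): committing a block list with
# the regenerated async operation above. Assumes the equivalent public
# package layout (azure.storage.blob._generated); the account URL and block
# IDs are hypothetical placeholders.
import asyncio

from azure.storage.blob._generated import models as _models
from azure.storage.blob._generated.aio import AzureBlobStorage


async def commit_example() -> None:
    # The generated client is scoped to a single blob URL.
    client = AzureBlobStorage(url="https://account.blob.core.windows.net/container/blob")
    # "latest" resolves each block ID from whichever list (committed or
    # uncommitted) holds the most recently uploaded version of that block.
    blocks = _models.BlockLookupList(latest=["YmxvY2stMDE=", "YmxvY2stMDI="])
    await client.block_blob.commit_block_list(
        blocks=blocks,
        metadata={"origin": "example"},  # metadata is now Dict[str, str], not a raw string
        timeout=30,
    )
    await client.close()


asyncio.run(commit_example())
# --------------------------------------------------------------------------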
Union[str, _models.BlockListType] = "committed", timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> "_models.BlockList": + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.BlockList: + # pylint: disable=line-too-long """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param list_type: Specifies whether to return the list of committed blocks, the list of - uncommitted blocks, or both lists together. + uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and + "all". Default value is "committed". :type list_type: str or ~azure.storage.blob.models.BlockListType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlockList, or the result of cls(response) + :return: BlockList or the result of cls(response) :rtype: ~azure.storage.blob.models.BlockList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlockList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + cls: ClsType[_models.BlockList] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - accept = "application/xml" - # Construct URL - url = self.get_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - query_parameters['blocklisttype'] = self._serialize.query("list_type", list_type, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_block_list_request( + url=self._config.url, + snapshot=snapshot, + list_type=list_type, + timeout=timeout, + lease_id=_lease_id, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response 
= pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlockList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlockList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_container_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_container_operations.py index ed32bc96241b..c2bc375a197f 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_container_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_container_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,56 +6,91 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
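# --------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): reading a block list through
# the regenerated get_block_list above. `client` is assumed to be an
# AzureBlobStorage async client scoped to a blob URL, as in the previous
# sketch; the committed_blocks/uncommitted_blocks attribute names follow the
# generated BlockList model.
async def list_blocks(client) -> None:
    block_list = await client.block_blob.get_block_list(list_type="all")
    for block in block_list.committed_blocks or []:
        print("committed:", block.name, block.size)
    for block in block_list.uncommitted_blocks or []:
        print("uncommitted:", block.name, block.size)
# --------------------------------------------------------------------------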
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, AsyncIterator, Callable, Dict, IO, List, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._container_operations import ( + build_acquire_lease_request, + build_break_lease_request, + build_change_lease_request, + build_create_request, + build_delete_request, + build_filter_blobs_request, + build_get_access_policy_request, + build_get_account_info_request, + build_get_properties_request, + build_list_blob_flat_segment_request, + build_list_blob_hierarchy_segment_request, + build_release_lease_request, + build_rename_request, + build_renew_lease_request, + build_restore_request, + build_set_access_policy_request, + build_set_metadata_request, + build_submit_batch_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ContainerOperations: - """ContainerOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ContainerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`container` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def create( + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, - access: Optional[Union[str, "_models.PublicAccessType"]] = None, + metadata: Optional[Dict[str, str]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, request_id_parameter: Optional[str] = None, - container_cpk_scope_info: Optional["_models.ContainerCpkScopeInfo"] = None, - **kwargs + container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """creates a new container under the specified account. If the container with the same name already exists, the operation fails. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -62,207 +98,227 @@ async def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. :type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_cpk_scope_info: Parameter group. + :param container_cpk_scope_info: Parameter group. Default value is None. 
:type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _default_encryption_scope = None _prevent_encryption_scope_override = None if container_cpk_scope_info is not None: _default_encryption_scope = container_cpk_scope_info.default_encryption_scope _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override - restype = "container" - accept = "application/xml" - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _default_encryption_scope is not None: - header_parameters['x-ms-default-encryption-scope'] = self._serialize.header("default_encryption_scope", _default_encryption_scope, 'str') - if _prevent_encryption_scope_override is not None: - header_parameters['x-ms-deny-encryption-scope-override'] = self._serialize.header("prevent_encryption_scope_override", _prevent_encryption_scope_override, 'bool') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_create_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + access=access, + request_id_parameter=request_id_parameter, + default_encryption_scope=_default_encryption_scope, + prevent_encryption_scope_override=_prevent_encryption_scope_override, + restype=restype, + 
version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - create.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def get_properties( + @distributed_trace_async + async def get_properties( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
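# --------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): creating a container with the
# regenerated operation above. As the class docstring notes, the operation
# group is reached through the client's `container` attribute; the URL is a
# hypothetical placeholder.
from azure.storage.blob._generated.aio import AzureBlobStorage


async def create_container() -> None:
    # For container operations the generated client is scoped to the container URL.
    client = AzureBlobStorage(url="https://account.blob.core.windows.net/container")
    await client.container.create(
        metadata={"env": "test"},  # dict-typed metadata after the regeneration
        access="container",        # container-level public read access
        timeout=30,
    )
    await client.close()
# --------------------------------------------------------------------------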
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - accept = "application/xml" - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) - response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) - response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) - response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["x-ms-has-immutability-policy"] = self._deserialize( + "bool", response.headers.get("x-ms-has-immutability-policy") + ) + response_headers["x-ms-has-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-has-legal-hold")) + response_headers["x-ms-default-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-default-encryption-scope") + ) + response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( + "bool", response.headers.get("x-ms-deny-encryption-scope-override") + ) + response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( + "bool", response.headers.get("x-ms-immutable-storage-with-versioning-enabled") + ) if cls: - return cls(pipeline_response, None, response_headers) - - get_properties.metadata = {'url': '/{containerName}'} # type: ignore + return 
cls(pipeline_response, None, response_headers) # type: ignore - async def delete( + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """operation marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -271,69 +327,61 @@ async def delete( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - accept = "application/xml" - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = 
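# --------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): get_properties above returns
# None and surfaces everything through response headers, so a `cls` callback
# is the way to capture them; it is invoked as cls(pipeline_response, None,
# response_headers), exactly as in the code above. `client` is assumed to be
# scoped to a container URL as in the previous sketch.
async def container_properties(client) -> dict:
    captured: dict = {}

    def on_response(pipeline_response, deserialized, response_headers):
        captured.update(response_headers)

    await client.container.get_properties(cls=on_response)
    # e.g. captured["x-ms-meta"], captured["ETag"], captured["x-ms-lease-state"]
    return captured
# --------------------------------------------------------------------------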
self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_delete_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_metadata( + @distributed_trace_async + async def set_metadata( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - 
modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """operation sets one or more user-defined name-value pairs for the specified container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -341,210 +389,226 @@ async def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since - restype = "container" - comp = "metadata" - accept = "application/xml" - - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_set_metadata_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + metadata=metadata, + if_modified_since=_if_modified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse 
= await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_metadata.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def get_access_policy( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs - ) -> List["_models.SignedIdentifier"]: + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> List[_models.SignedIdentifier]: + # pylint: disable=line-too-long """gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
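For orientation, a minimal usage sketch of the public wrapper that ultimately drives the generated set_metadata call above. This is an assumption-laden illustration: the import path uses the public azure-storage-blob package for brevity, on the assumption that the vendored copy exposes the same aio surface.

    import asyncio
    # Assumed import: the vendored copy mirrors the public azure.storage.blob.aio surface.
    from azure.storage.blob.aio import ContainerClient

    async def main():
        async with ContainerClient.from_connection_string(
            "<connection-string>", container_name="checkpoints"
        ) as container:
            # Round-trips through the generated set_metadata operation;
            # an If-Modified-Since precondition can be passed as if_modified_since=...
            await container.set_container_metadata({"owner": "eventhub-checkpoints"})

    asyncio.run(main())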
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: list of SignedIdentifier, or the result of cls(response) + :return: list of SignedIdentifier or the result of cls(response) :rtype: list[~azure.storage.blob.models.SignedIdentifier] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - comp = "acl" - accept = "application/xml" - - # Construct URL - url = self.get_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_get_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) 
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - async def set_access_policy( + @distributed_trace_async + async def set_access_policy( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - access: Optional[Union[str, "_models.PublicAccessType"]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, request_id_parameter: Optional[str] = None, - container_acl: Optional[List["_models.SignedIdentifier"]] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + container_acl: Optional[List[_models.SignedIdentifier]] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """sets the permissions for the specified container. The permissions indicate whether blobs in a container may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. 
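A hedged sketch of the public access-policy wrappers that sit on top of these generated acl operations. AccessPolicy and ContainerSasPermissions are names from the public azure-storage-blob package, which the vendored copy is assumed to mirror; set_container_access_policy serializes the wrapped SignedIdentifiers XML body shown above and sends x-ms-blob-public-access.

    import asyncio
    from datetime import datetime, timedelta, timezone
    from azure.storage.blob import AccessPolicy, ContainerSasPermissions
    from azure.storage.blob.aio import ContainerClient

    async def main():
        async with ContainerClient.from_connection_string(
            "<connection-string>", container_name="checkpoints"
        ) as container:
            # One signed identifier, granting read access for an hour.
            policy = AccessPolicy(
                permission=ContainerSasPermissions(read=True),
                expiry=datetime.now(timezone.utc) + timedelta(hours=1),
            )
            await container.set_container_access_policy(
                signed_identifiers={"read-only": policy}, public_access="blob"
            )
            acl = await container.get_container_access_policy()
            print(acl["public_access"], acl["signed_identifiers"])

    asyncio.run(main())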
:type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_acl: the acls for the container. - :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param container_acl: the acls for the container. Default value is None. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -553,911 +617,1199 @@ async def set_access_policy( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - comp = "acl" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = 
self._serialize.header("lease_id", _lease_id, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} if container_acl is not None: - body_content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + _content = self._serialize.body( + container_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt + ) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + access=access, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_access_policy.metadata = {'url': '/{containerName}'} # type: ignore - - async def restore( + @distributed_trace_async + async def restore( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, deleted_container_name: Optional[str] = None, deleted_container_version: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Restores a previously-deleted container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of - the deleted container to restore. + the deleted container to restore. Default value is None. :type deleted_container_name: str :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the - version of the deleted container to restore. + version of the deleted container to restore. Default value is None. 
:type deleted_container_version: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.restore.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if deleted_container_name is not None: - header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str') - if deleted_container_version is not None: - header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_restore_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize(_models.StorageError, response)
+        error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+        raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)  # type: ignore
+
+    @distributed_trace_async
+    async def rename(  # pylint: disable=inconsistent-return-statements
+        self,
+        source_container_name: str,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        source_lease_id: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
+        """Renames an existing container.
+
+        :param source_container_name: Specifies the name of the container to rename. Required.
+        :type source_container_name: str
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
+        :type request_id_parameter: str
+        :param source_lease_id: A lease ID for the source path. If specified, the source path must have
+         an active lease and the lease ID must match. Default value is None.
+ :type source_lease_id: str + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_rename_request( + url=self._config.url, + source_container_name=source_container_name, + timeout=timeout, + request_id_parameter=request_id_parameter, + source_lease_id=source_lease_id, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def submit_batch( + self, + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
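Every operation in this file merges a caller-supplied error_map over the defaults, via error_map.update(kwargs.pop("error_map", {}) or {}), so individual status codes can be remapped per call. A minimal sketch, assuming container_ops is an instance of this generated async operations class and using a hypothetical app-specific error type:

    from azure.core.exceptions import HttpResponseError

    class ContainerMissingError(HttpResponseError):
        """Hypothetical app-specific error type (not part of the SDK)."""

    async def rename_or_raise(container_ops):
        # The supplied mapping is merged over the defaults shown above, so a 404
        # from this call raises ContainerMissingError instead of ResourceNotFoundError.
        await container_ops.rename(
            "source-container",
            error_map={404: ContainerMissingError},
        )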
+ :type request_id_parameter: str + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - restore.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - async def acquire_lease( + @distributed_trace_async + async def filter_blobs( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long + 
"""The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. Filter blobs searches within the given container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param where: Filters the results to return only to return only blobs whose tags match the + specified expression. Default value is None. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. 
+ :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def acquire_lease( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, duration: Optional[int] = None, proposed_lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. 
+ duration cannot be changed using renew or change. Default value is None. :type duration: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Default value is None. :type proposed_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "acquire" - accept = "application/xml" - - # Construct URL - url = self.acquire_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if duration is not None: - header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') - if proposed_lease_id is not None: - 
header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': 
'/{containerName}'} # type: ignore - - async def release_lease( + @distributed_trace_async + async def release_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = 
self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return 
cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def renew_lease( + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = 
self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_renew_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", 
response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - renew_lease.metadata = {'url': '/{containerName}'} # type: ignore - - async def break_lease( + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, break_period: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a fixed- - duration lease breaks after the remaining lease period elapses, and an infinite lease breaks - immediately. + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. :type break_period: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "break" - accept = "application/xml" - - # Construct URL - url = self.break_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if break_period is not None: - header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_break_lease_request( + url=self._config.url, + timeout=timeout, + break_period=break_period, + 
if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - break_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def change_lease( + @distributed_trace_async + async def change_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, proposed_lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. 
+ Constructor (String) for a list of valid GUID string formats. Required. :type proposed_lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "change" - accept = "application/xml" - - # Construct URL - url = self.change_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_change_lease_request( + url=self._config.url, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - change_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def list_blob_flat_segment( self, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - 
include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.ListBlobsFlatSegmentResponse": + **kwargs: Any + ) -> _models.ListBlobsFlatSegmentResponse: + # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify one or more datasets to include in the - response. + response. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
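Reviewer note: `list_blob_flat_segment` is the wire-level call behind the public `ContainerClient.list_blobs()` helper; the `prefix` and `include` parameters documented in this docstring map to its `name_starts_with=` and `include=` arguments. A minimal sketch against the public aio package, with placeholder names:

```python
# Illustrative sketch only -- public azure-storage-blob aio API; the
# connection string, container name, and prefix are placeholders.
import asyncio

from azure.storage.blob.aio import ContainerClient


async def main() -> None:
    async with ContainerClient.from_connection_string(
        "<connection-string>", container_name="my-container"
    ) as container:
        # include= becomes the comma-joined ListBlobsIncludeItem datasets
        # described in the docstring above.
        async for blob in container.list_blobs(
            name_starts_with="checkpoint/", include=["metadata", "snapshots"]
        ):
            print(blob.name, blob.metadata)


asyncio.run(main())
```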
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListBlobsFlatSegmentResponse, or the result of cls(response) + :return: ListBlobsFlatSegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsFlatSegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_blob_flat_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_blob_flat_segment_request( + url=self._config.url, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_blob_flat_segment.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def list_blob_hierarchy_segment( self, delimiter: str, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.ListBlobsHierarchySegmentResponse": + **kwargs: Any + ) -> _models.ListBlobsHierarchySegmentResponse: + # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose names begin with the same substring up to the appearance of the delimiter character. The delimiter may be a - single character or a string. + single character or a string. Required. :type delimiter: str :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. 
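Reviewer note: the hierarchy variant whose docstring begins here is what the public `walk_blobs()` helper drives, passing `delimiter="/"` by default so that common name prefixes come back as `BlobPrefix` placeholders. A short sketch under the same placeholder assumptions:

```python
# Illustrative sketch only -- public azure-storage-blob aio API with
# placeholder connection values.
import asyncio

from azure.storage.blob.aio import ContainerClient


async def main() -> None:
    async with ContainerClient.from_connection_string(
        "<connection-string>", container_name="my-container"
    ) as container:
        async for item in container.walk_blobs(delimiter="/"):
            # Items are BlobPrefix "folders" or BlobProperties leaves.
            print(type(item).__name__, item.name)


asyncio.run(main())
```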
The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify one or more datasets to include in the - response. + response. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :return: ListBlobsHierarchySegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_blob_hierarchy_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - query_parameters['delimiter'] = self._serialize.query("delimiter", delimiter, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = 
self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_blob_hierarchy_segment_request( + url=self._config.url, + delimiter=delimiter, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return 
deserialized - list_blob_hierarchy_segment.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - async def get_account_info( - self, - **kwargs + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if 
response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py index 100f730254ed..38e66803e85b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,70 +7,98 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
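Reviewer note, before the regenerated page-blob module below: the `get_account_info` operation just shown is reachable through the public `get_account_information()` helper, whose result dictionary echoes the `x-ms-sku-name` and `x-ms-account-kind` response headers deserialized above. A minimal sketch with placeholder values:

```python
# Illustrative sketch only -- public azure-storage-blob aio API with a
# placeholder connection string and container name.
import asyncio

from azure.storage.blob.aio import ContainerClient


async def main() -> None:
    async with ContainerClient.from_connection_string(
        "<connection-string>", container_name="my-container"
    ) as container:
        info = await container.get_account_information()
        print(info["sku_name"], info["account_kind"])


asyncio.run(main())
```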
# -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._page_blob_operations import ( + build_clear_pages_request, + build_copy_incremental_request, + build_create_request, + build_get_page_ranges_diff_request, + build_get_page_ranges_request, + build_resize_request, + build_update_sequence_number_request, + build_upload_pages_from_url_request, + build_upload_pages_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PageBlobOperations: - """PageBlobOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class PageBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`page_blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def create( + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements self, content_length: int, blob_content_length: int, timeout: Optional[int] = None, - tier: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - blob_sequence_number: Optional[int] = 0, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + metadata: Optional[Dict[str, str]] = None, + blob_sequence_number: int = 0, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Create operation creates a new page blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. - :type blob_content_length: long + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :type blob_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param tier: Optional. Indicates the tier to be set on the page blob. + :param tier: Optional. Indicates the tier to be set on the page blob. Known values are: "P4", + "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None. :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. 
If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -77,42 +106,55 @@ async def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. - :type blob_sequence_number: long + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
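Reviewer note: `PageBlobOperations.create` backs the public `BlobClient.create_page_blob()` helper; `blob_content_length` (the `size=` argument below) must be 512-byte aligned, as the docstring above notes. A minimal sketch against the public aio package, with placeholder names:

```python
# Illustrative sketch only -- public azure-storage-blob aio API; the
# connection string, container name, and blob name are placeholders.
import asyncio

from azure.storage.blob.aio import BlobClient


async def main() -> None:
    async with BlobClient.from_connection_string(
        "<connection-string>", container_name="my-container", blob_name="disk.vhd"
    ) as blob:
        # size maps to x-ms-blob-content-length; sequence_number to
        # x-ms-blob-sequence-number in the generated create operation.
        await blob.create_page_blob(size=1024, sequence_number=0)


asyncio.run(main())
```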
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -122,6 +164,7 @@ async def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -129,183 +172,184 @@ async def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "PageBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + blob_content_length=blob_content_length, + timeout=timeout, + tier=tier, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + 
blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - 
header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') - if blob_sequence_number is not None: - header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = 
self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def upload_pages( + @distributed_trace_async + async def upload_pages( # pylint: disable=inconsistent-return-statements self, content_length: int, - body: IO, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, + body: IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, timeout: Optional[int] = None, range: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Upload Pages operation writes a range of pages to a page blob. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. 
+ :type transactional_content_crc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. - :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_sequence_number_less_than_or_equal_to = None _if_sequence_number_less_than = None @@ -315,168 +359,169 @@ async def upload_pages( _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_less_than_or_equal_to = ( + sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + ) if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if sequence_number_access_conditions is not None: - _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - comp = "page" - page_write = "update" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.upload_pages.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", 
self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_sequence_number_less_than_or_equal_to is not None: - header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') - if _if_sequence_number_less_than is not None: - header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') - if _if_sequence_number_equal_to is not None: - header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - 
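
The removed lines here (continuing in the remainder of this hunk below) show the old generated style, where every operation serialized its own query parameters and headers inline; the replacement code instead delegates that work to module-level request builders such as `build_upload_pages_request`, pins `comp`/`page_write` with `Literal` types, and only formats the URL and runs the pipeline at the call site. A minimal sketch of that builder pattern, assuming `azure.core.rest.HttpRequest` and trimming the header set down to a few representative entries (the real vendored builder handles the full set shown in the diff):

    from typing import Any, Optional

    from azure.core.rest import HttpRequest
    from azure.core.utils import case_insensitive_dict

    def build_upload_pages_request_sketch(
        url: str, *, content_length: int, range: Optional[str] = None, version: str, **kwargs: Any
    ) -> HttpRequest:
        # Trimmed, hypothetical illustration only: the vendored
        # build_upload_pages_request sets many more conditional headers.
        headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        params["comp"] = "page"  # query string: ?comp=page
        headers["x-ms-page-write"] = "update"
        headers["Content-Length"] = str(content_length)
        if range is not None:
            headers["x-ms-range"] = range
        headers["x-ms-version"] = version
        headers["Accept"] = "application/xml"
        return HttpRequest(method="PUT", url=url, params=params, headers=headers, **kwargs)

The remainder of the removed inline construction follows.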
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_upload_pages_request( + url=self._config.url, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + page_write=page_write, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", 
response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - upload_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def clear_pages( + @distributed_trace_async + async def clear_pages( # pylint: disable=inconsistent-return-statements self, content_length: int, timeout: Optional[int] = None, range: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Clear Pages operation clears a set of pages from a page blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. 
Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. - :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_sequence_number_less_than_or_equal_to = None _if_sequence_number_less_than = None @@ -486,176 +531,174 @@ async def clear_pages( _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + 
_if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_less_than_or_equal_to = ( + sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + ) if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if sequence_number_access_conditions is not None: - _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - comp = "page" - page_write = "clear" - accept = "application/xml" - - # Construct URL - url = self.clear_pages.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_sequence_number_less_than_or_equal_to is not None: - header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') - if _if_sequence_number_less_than is not None: - header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') - if _if_sequence_number_equal_to is not None: - header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", 
_if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_clear_pages_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + page_write=page_write, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + 
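
One change repeated across every operation in this file is visible just above: error bodies are now parsed with `self._deserialize.failsafe_deserialize(...)` instead of a direct `self._deserialize(...)` call, so a malformed or empty `StorageError` payload can no longer raise a secondary parsing exception that masks the real `HttpResponseError`. A rough sketch of what failsafe deserialization means, using a simplified stand-in for the vendored `_serialization.Deserializer`:

    import logging
    from typing import Any, Optional

    _LOGGER = logging.getLogger(__name__)

    class Deserializer:
        def __call__(self, target_type: Any, response_data: Any) -> Any:
            # Stand-in: the real vendored Deserializer parses the XML/JSON
            # body here and raises on malformed input.
            raise NotImplementedError

        def failsafe_deserialize(self, target_type: Any, response_data: Any) -> Optional[Any]:
            # Best effort: swallow parsing failures so the original HTTP
            # error still surfaces to the caller.
            try:
                return self(target_type, response_data)
            except Exception:  # pylint: disable=broad-except
                _LOGGER.debug("Failsafe deserialization failed; returning None")
                return None

The diff then resumes with the new response-header deserialization block.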
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - clear_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def upload_pages_from_url( + @distributed_trace_async + async def upload_pages_from_url( # pylint: disable=inconsistent-return-statements self, source_url: str, source_range: str, content_length: int, range: str, - source_content_md5: Optional[bytearray] = None, - source_contentcrc64: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Upload Pages operation writes a range of pages to a page blob where the contents are read from a URL. - :param source_url: Specify a URL to the copy source. + :param source_url: Specify a URL to the copy source. Required. :type source_url: str :param source_range: Bytes of source data in the specified range. The length of this range - should match the ContentLength header and x-ms-range/Range destination range header. + should match the ContentLength header and x-ms-range/Range destination range header. Required. :type source_range: str - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param range: The range of bytes to which the source range would be written. 
The range should - be 512 aligned and range-end is required. + be 512 aligned and range-end is required. Required. :type range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. Default value is None. + :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param sequence_number_access_conditions: Parameter group. - :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _if_sequence_number_less_than_or_equal_to = None @@ -671,164 +714,175 @@ async def upload_pages_from_url( _source_if_match = None _source_if_none_match = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_less_than_or_equal_to = ( + sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + ) if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if sequence_number_access_conditions is not None: - _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "page" - page_write = "update" - accept = "application/xml" - - # Construct URL - url = 
self.upload_pages_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_sequence_number_less_than_or_equal_to is not None: - header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') - if _if_sequence_number_less_than is not None: - header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') - if _if_sequence_number_equal_to is not None: - header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = 
self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_upload_pages_from_url_request( + url=self._config.url, + source_url=source_url, + source_range=source_range, + content_length=content_length, + range=range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + page_write=page_write, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', 
response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - upload_pages_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def get_page_ranges( self, snapshot: Optional[str] = None, timeout: Optional[int] = None, range: Optional[str] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> "_models.PageList": + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.PageList: + # pylint: disable=line-too-long """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a page blob. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PageList, or the result of cls(response) + :return: PageList or the result of cls(response) :rtype: ~azure.storage.blob.models.PageList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -838,75 +892,66 @@ async def get_page_ranges( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "pagelist" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_page_ranges_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_page_ranges.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", 
_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('PageList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_page_ranges.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def 
get_page_ranges_diff( self, snapshot: Optional[str] = None, @@ -915,55 +960,81 @@ async def get_page_ranges_diff( prev_snapshot_url: Optional[str] = None, range: Optional[str] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> "_models.PageList": + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.PageList: + # pylint: disable=line-too-long """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were changed between target blob and previous snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a DateTime value that specifies that the response will contain only pages that were changed between target blob and previous snapshot. Changed pages include both updated and cleared pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is the older of the two. Note that incremental snapshots are currently supported only for blobs - created on or after January 1, 2016. + created on or after January 1, 2016. Default value is None. :type prevsnapshot: str :param prev_snapshot_url: Optional. This header is only supported in service versions 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The response will only contain pages that were changed between the target blob and its previous - snapshot. + snapshot. Default value is None. :type prev_snapshot_url: str - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. 
If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PageList, or the result of cls(response) + :return: PageList or the result of cls(response) :rtype: ~azure.storage.blob.models.PageList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -973,257 +1044,249 @@ async def get_page_ranges_diff( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "pagelist" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_page_ranges_diff_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + prevsnapshot=prevsnapshot, + prev_snapshot_url=prev_snapshot_url, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_page_ranges_diff.metadata['url'] # type: ignore - 
path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if prevsnapshot is not None: - query_parameters['prevsnapshot'] = self._serialize.query("prevsnapshot", prevsnapshot, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if prev_snapshot_url is not None: - header_parameters['x-ms-previous-snapshot-url'] = self._serialize.header("prev_snapshot_url", prev_snapshot_url, 'str') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('PageList', pipeline_response) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_page_ranges_diff.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - async def resize( + @distributed_trace_async + async def resize( # pylint: disable=inconsistent-return-statements self, blob_content_length: int, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Resize the Blob. :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. - :type blob_content_length: long + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :type blob_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. 
+ :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_resize_request( + url=self._config.url, + blob_content_length=blob_content_length, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.resize.metadata['url'] # type: ignore - path_format_arguments = { - 'url': 
self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - resize.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def update_sequence_number( + @distributed_trace_async + async def update_sequence_number( # pylint: disable=inconsistent-return-statements self, - sequence_number_action: Union[str, "_models.SequenceNumberActionType"], + sequence_number_action: Union[str, _models.SequenceNumberActionType], timeout: Optional[int] = None, - blob_sequence_number: Optional[int] = 0, + blob_sequence_number: int = 0, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Update the sequence number of the blob. :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the request. This property applies to page blobs only. This property indicates how the service - should modify the blob's sequence number. + should modify the blob's sequence number. Known values are: "max", "update", and "increment". + Required. :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. - :type blob_sequence_number: long + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
+ :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -1233,80 +1296,69 @@ async def update_sequence_number( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_update_sequence_number_request( + url=self._config.url, + sequence_number_action=sequence_number_action, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.update_sequence_number.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 
'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-sequence-number-action'] = self._serialize.header("sequence_number_action", sequence_number_action, 'str') - if blob_sequence_number is not None: - header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - update_sequence_number.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def copy_incremental( + @distributed_trace_async + async def copy_incremental( # pylint: 
disable=inconsistent-return-statements self, copy_source: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between the previously copied snapshot are transferred to the destination. The copied snapshots are complete copies of @@ -1316,93 +1368,89 @@ async def copy_incremental( :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
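
A hedged sketch of invoking the rewired copy_incremental, assuming `page_blob` is a constructed `PageBlobOperations` instance and `source_snapshot_url` is a URL-encoded, publicly readable (or SAS-authenticated) page-blob snapshot URL:

    async def start_incremental_copy(page_blob, source_snapshot_url: str):
        # The service accepts the copy with 202 Accepted; progress is surfaced
        # through the x-ms-copy-id and x-ms-copy-status response headers.
        await page_blob.copy_incremental(copy_source=source_snapshot_url)
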
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "incrementalcopy" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_copy_incremental_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.copy_incremental.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) if cls: - return cls(pipeline_response, None, response_headers) - - copy_incremental.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_patch.py new file mode 100644 index 000000000000..71dde502c70f --- /dev/null +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_patch.py @@ -0,0 +1,26 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + + +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + + from typing import List +__all__ = [] # type: List[str] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_service_operations.py index 91a064680562..0f0e61805e05 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_service_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/aio/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,593 +6,648 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, AsyncIterator, Callable, Dict, IO, List, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... 
import models as _models - -T = TypeVar('T') +from ...operations._service_operations import ( + build_filter_blobs_request, + build_get_account_info_request, + build_get_properties_request, + build_get_statistics_request, + build_get_user_delegation_key_request, + build_list_containers_segment_request, + build_set_properties_request, + build_submit_batch_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ServiceOperations: - """ServiceOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`service` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def set_properties( + @distributed_trace_async + async def set_properties( # pylint: disable=inconsistent-return-statements self, - storage_service_properties: "_models.StorageServiceProperties", + storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Sets properties for a storage account's Blob service endpoint, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - :param storage_service_properties: The StorageService properties. + :param storage_service_properties: The StorageService properties. Required. :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
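
A minimal sketch of the reworked set_properties call, assuming `service` is the `ServiceOperations` instance reached through the generated client's `service` attribute; the model names come from the vendored `_generated.models` package:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    async def enable_delete_logging(service):
        # Turn on delete/write logging with a 7-day retention window.
        props = _models.StorageServiceProperties(
            logging=_models.Logging(
                version="1.0",
                delete=True,
                read=False,
                write=True,
                retention_policy=_models.RetentionPolicy(enabled=True, days=7),
            )
        )
        await service.set_properties(props)
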
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True) + + _request = build_set_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, 
**kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) - - set_properties.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def get_properties( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.StorageServiceProperties": + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceProperties: + # pylint: disable=line-too-long """gets the properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
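
With the same assumed `service` handle, a short sketch for the rewritten get_properties, which returns a `StorageServiceProperties` model unless a `cls` callback overrides the result:

    async def show_cors_rules(service):
        props = await service.get_properties()
        for rule in props.cors or []:
            print(rule.allowed_origins, rule.allowed_methods)
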
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceProperties, or the result of cls(response) + :return: StorageServiceProperties or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceProperties - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) + + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_properties.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def get_statistics( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.StorageServiceStats": + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceStats: + # pylint: disable=line-too-long """Retrieves statistics related to replication for the Blob service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the storage account. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceStats, or the result of cls(response) + :return: StorageServiceStats or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceStats - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "stats" - accept = "application/xml" - - # Construct URL - url = self.get_statistics.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) + cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) + + _request = build_get_statistics_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('StorageServiceStats', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_statistics.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def list_containers_segment( self, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - include: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] = None, + include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.ListContainersSegmentResponse": + **kwargs: Any + ) -> _models.ListContainersSegmentResponse: + # pylint: disable=line-too-long """The List Containers Segment operation returns a list of the containers under the specified account. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify that the container's metadata be returned as - part of the response body. + part of the response body. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListContainersSegmentResponse, or the result of cls(response) + :return: ListContainersSegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_containers_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_containers_segment_request( + url=self._config.url, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = 
False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_containers_segment.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def get_user_delegation_key( self, - key_info: "_models.KeyInfo", + key_info: _models.KeyInfo, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.UserDelegationKey": + **kwargs: Any + ) -> _models.UserDelegationKey: + # pylint: disable=line-too-long """Retrieves a user delegation key for the Blob service. This is only a valid operation when using bearer token authentication. - :param key_info: + :param key_info: Key information. Required. :type key_info: ~azure.storage.blob.models.KeyInfo :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: UserDelegationKey, or the result of cls(response) + :return: UserDelegationKey or the result of cls(response) :rtype: ~azure.storage.blob.models.UserDelegationKey - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "userdelegationkey" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.get_user_delegation_key.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) + + _content = self._serialize.body(key_info, "KeyInfo", is_xml=True) + + _request = build_get_user_delegation_key_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('UserDelegationKey', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("UserDelegationKey", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_user_delegation_key.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore - async def get_account_info( - self, - **kwargs + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) - 
response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', response.headers.get('x-ms-is-hns-enabled')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def submit_batch( self, content_length: int, - multipart_content_type: str, - body: IO, + body: IO[bytes], timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> IO: + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. - :param content_length: The length of the request. - :type content_length: long - :param multipart_content_type: Required. The value of this header must be multipart/mixed with - a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. - :type multipart_content_type: str - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "batch" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.submit_batch.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(body, 'IO', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - submit_batch.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def filter_blobs( self, timeout: Optional[int] = None, @@ -599,93 +655,99 @@ async def filter_blobs( where: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - **kwargs - ) -> "_models.FilterBlobSegment": + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be scoped within the expression to a single container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param where: Filters the results to return only blobs whose tags match the - specified expression. + specified expression. Default value is None. :type where: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client.
+ client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: FilterBlobSegment, or the result of cls(response) + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) :rtype: ~azure.storage.blob.models.FilterBlobSegment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "blobs" - accept = "application/xml" - - # Construct URL - url = self.filter_blobs.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if where is not None: - query_parameters['where'] = self._serialize.query("where", where, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + 
request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - filter_blobs.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/__init__.py index 9c98989e6847..63ca7e23fc24 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/__init__.py @@ -6,218 +6,168 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -try: - from ._models_py3 import AccessPolicy - from ._models_py3 import AppendPositionAccessConditions - from ._models_py3 import ArrowConfiguration - from ._models_py3 import ArrowField - from ._models_py3 import BlobFlatListSegment - from ._models_py3 import BlobHTTPHeaders - from ._models_py3 import BlobHierarchyListSegment - from ._models_py3 import BlobItemInternal - from ._models_py3 import BlobMetadata - from ._models_py3 import BlobPrefix - from ._models_py3 import BlobPropertiesInternal - from ._models_py3 import BlobTag - from ._models_py3 import BlobTags - from ._models_py3 import Block - from ._models_py3 import BlockList - from ._models_py3 import BlockLookupList - from ._models_py3 import ClearRange - from ._models_py3 import ContainerCpkScopeInfo - from ._models_py3 import ContainerItem - from ._models_py3 import ContainerProperties - from ._models_py3 import CorsRule - from ._models_py3 import CpkInfo - from ._models_py3 import CpkScopeInfo - from ._models_py3 import DataLakeStorageError - from ._models_py3 import DataLakeStorageErrorDetails - from ._models_py3 import DelimitedTextConfiguration - from ._models_py3 import DirectoryHttpHeaders - from ._models_py3 import FilterBlobItem - from ._models_py3 import FilterBlobSegment - from ._models_py3 import GeoReplication - from ._models_py3 import JsonTextConfiguration - from ._models_py3 import KeyInfo - from ._models_py3 import LeaseAccessConditions - from ._models_py3 import ListBlobsFlatSegmentResponse - from ._models_py3 import ListBlobsHierarchySegmentResponse - from ._models_py3 import ListContainersSegmentResponse - from ._models_py3 import Logging - from ._models_py3 import Metrics - from ._models_py3 import ModifiedAccessConditions - from ._models_py3 import PageList - from ._models_py3 import PageRange - from ._models_py3 import QueryFormat - from ._models_py3 import QueryRequest - from ._models_py3 import QuerySerialization - from ._models_py3 import RetentionPolicy - from ._models_py3 import SequenceNumberAccessConditions - from ._models_py3 import SignedIdentifier - from ._models_py3 import SourceModifiedAccessConditions - from ._models_py3 import StaticWebsite - from ._models_py3 import StorageError - from ._models_py3 import StorageServiceProperties - from ._models_py3 import StorageServiceStats - from ._models_py3 import UserDelegationKey -except (SyntaxError, ImportError): - from ._models import AccessPolicy # type: ignore - from ._models import AppendPositionAccessConditions # type: ignore - from ._models import ArrowConfiguration # type: ignore - from ._models import ArrowField # type: ignore - from ._models import BlobFlatListSegment # type: ignore - from ._models import BlobHTTPHeaders # type: ignore - from ._models import BlobHierarchyListSegment # type: ignore - from ._models import BlobItemInternal # type: ignore - from ._models import BlobMetadata # type: ignore - from ._models import BlobPrefix # type: ignore - from ._models import BlobPropertiesInternal # type: ignore - from ._models import BlobTag # type: ignore - from ._models import BlobTags # type: ignore - from ._models import Block # type: ignore - from ._models import BlockList # type: ignore - from ._models import BlockLookupList # type: ignore - from ._models import ClearRange # type: ignore - from ._models import ContainerCpkScopeInfo # type: ignore - from ._models import ContainerItem # type: ignore - from ._models import ContainerProperties # type: ignore - 
from ._models import CorsRule # type: ignore - from ._models import CpkInfo # type: ignore - from ._models import CpkScopeInfo # type: ignore - from ._models import DataLakeStorageError # type: ignore - from ._models import DataLakeStorageErrorDetails # type: ignore - from ._models import DelimitedTextConfiguration # type: ignore - from ._models import DirectoryHttpHeaders # type: ignore - from ._models import FilterBlobItem # type: ignore - from ._models import FilterBlobSegment # type: ignore - from ._models import GeoReplication # type: ignore - from ._models import JsonTextConfiguration # type: ignore - from ._models import KeyInfo # type: ignore - from ._models import LeaseAccessConditions # type: ignore - from ._models import ListBlobsFlatSegmentResponse # type: ignore - from ._models import ListBlobsHierarchySegmentResponse # type: ignore - from ._models import ListContainersSegmentResponse # type: ignore - from ._models import Logging # type: ignore - from ._models import Metrics # type: ignore - from ._models import ModifiedAccessConditions # type: ignore - from ._models import PageList # type: ignore - from ._models import PageRange # type: ignore - from ._models import QueryFormat # type: ignore - from ._models import QueryRequest # type: ignore - from ._models import QuerySerialization # type: ignore - from ._models import RetentionPolicy # type: ignore - from ._models import SequenceNumberAccessConditions # type: ignore - from ._models import SignedIdentifier # type: ignore - from ._models import SourceModifiedAccessConditions # type: ignore - from ._models import StaticWebsite # type: ignore - from ._models import StorageError # type: ignore - from ._models import StorageServiceProperties # type: ignore - from ._models import StorageServiceStats # type: ignore - from ._models import UserDelegationKey # type: ignore +from ._models_py3 import AccessPolicy +from ._models_py3 import AppendPositionAccessConditions +from ._models_py3 import ArrowConfiguration +from ._models_py3 import ArrowField +from ._models_py3 import BlobFlatListSegment +from ._models_py3 import BlobHTTPHeaders +from ._models_py3 import BlobHierarchyListSegment +from ._models_py3 import BlobItemInternal +from ._models_py3 import BlobMetadata +from ._models_py3 import BlobName +from ._models_py3 import BlobPrefix +from ._models_py3 import BlobPropertiesInternal +from ._models_py3 import BlobTag +from ._models_py3 import BlobTags +from ._models_py3 import Block +from ._models_py3 import BlockList +from ._models_py3 import BlockLookupList +from ._models_py3 import ClearRange +from ._models_py3 import ContainerCpkScopeInfo +from ._models_py3 import ContainerItem +from ._models_py3 import ContainerProperties +from ._models_py3 import CorsRule +from ._models_py3 import CpkInfo +from ._models_py3 import CpkScopeInfo +from ._models_py3 import DelimitedTextConfiguration +from ._models_py3 import FilterBlobItem +from ._models_py3 import FilterBlobSegment +from ._models_py3 import GeoReplication +from ._models_py3 import JsonTextConfiguration +from ._models_py3 import KeyInfo +from ._models_py3 import LeaseAccessConditions +from ._models_py3 import ListBlobsFlatSegmentResponse +from ._models_py3 import ListBlobsHierarchySegmentResponse +from ._models_py3 import ListContainersSegmentResponse +from ._models_py3 import Logging +from ._models_py3 import Metrics +from ._models_py3 import ModifiedAccessConditions +from ._models_py3 import PageList +from ._models_py3 import PageRange +from ._models_py3 import QueryFormat +from 
._models_py3 import QueryRequest +from ._models_py3 import QuerySerialization +from ._models_py3 import RetentionPolicy +from ._models_py3 import SequenceNumberAccessConditions +from ._models_py3 import SignedIdentifier +from ._models_py3 import SourceModifiedAccessConditions +from ._models_py3 import StaticWebsite +from ._models_py3 import StorageError +from ._models_py3 import StorageServiceProperties +from ._models_py3 import StorageServiceStats +from ._models_py3 import UserDelegationKey -from ._azure_blob_storage_enums import ( - AccessTier, - AccessTierOptional, - AccessTierRequired, - AccountKind, - ArchiveStatus, - BlobExpiryOptions, - BlobType, - BlockListType, - CopyStatusType, - DeleteSnapshotsOptionType, - GeoReplicationStatusType, - LeaseDurationType, - LeaseStateType, - LeaseStatusType, - ListBlobsIncludeItem, - ListContainersIncludeType, - PathRenameMode, - PremiumPageBlobAccessTier, - PublicAccessType, - QueryFormatType, - RehydratePriority, - SequenceNumberActionType, - SkuName, - StorageErrorCode, -) +from ._azure_blob_storage_enums import AccessTier +from ._azure_blob_storage_enums import AccessTierOptional +from ._azure_blob_storage_enums import AccessTierRequired +from ._azure_blob_storage_enums import AccountKind +from ._azure_blob_storage_enums import ArchiveStatus +from ._azure_blob_storage_enums import BlobCopySourceTags +from ._azure_blob_storage_enums import BlobExpiryOptions +from ._azure_blob_storage_enums import BlobImmutabilityPolicyMode +from ._azure_blob_storage_enums import BlobType +from ._azure_blob_storage_enums import BlockListType +from ._azure_blob_storage_enums import CopyStatusType +from ._azure_blob_storage_enums import DeleteSnapshotsOptionType +from ._azure_blob_storage_enums import EncryptionAlgorithmType +from ._azure_blob_storage_enums import FilterBlobsIncludeItem +from ._azure_blob_storage_enums import GeoReplicationStatusType +from ._azure_blob_storage_enums import LeaseDurationType +from ._azure_blob_storage_enums import LeaseStateType +from ._azure_blob_storage_enums import LeaseStatusType +from ._azure_blob_storage_enums import ListBlobsIncludeItem +from ._azure_blob_storage_enums import ListContainersIncludeType +from ._azure_blob_storage_enums import PremiumPageBlobAccessTier +from ._azure_blob_storage_enums import PublicAccessType +from ._azure_blob_storage_enums import QueryFormatType +from ._azure_blob_storage_enums import RehydratePriority +from ._azure_blob_storage_enums import SequenceNumberActionType +from ._azure_blob_storage_enums import SkuName +from ._azure_blob_storage_enums import StorageErrorCode +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'AccessPolicy', - 'AppendPositionAccessConditions', - 'ArrowConfiguration', - 'ArrowField', - 'BlobFlatListSegment', - 'BlobHTTPHeaders', - 'BlobHierarchyListSegment', - 'BlobItemInternal', - 'BlobMetadata', - 'BlobPrefix', - 'BlobPropertiesInternal', - 'BlobTag', - 'BlobTags', - 'Block', - 'BlockList', - 'BlockLookupList', - 'ClearRange', - 'ContainerCpkScopeInfo', - 'ContainerItem', - 'ContainerProperties', - 'CorsRule', - 'CpkInfo', - 'CpkScopeInfo', - 'DataLakeStorageError', - 'DataLakeStorageErrorDetails', - 'DelimitedTextConfiguration', - 'DirectoryHttpHeaders', - 'FilterBlobItem', - 'FilterBlobSegment', - 'GeoReplication', - 'JsonTextConfiguration', - 'KeyInfo', - 'LeaseAccessConditions', - 'ListBlobsFlatSegmentResponse', - 
'ListBlobsHierarchySegmentResponse', - 'ListContainersSegmentResponse', - 'Logging', - 'Metrics', - 'ModifiedAccessConditions', - 'PageList', - 'PageRange', - 'QueryFormat', - 'QueryRequest', - 'QuerySerialization', - 'RetentionPolicy', - 'SequenceNumberAccessConditions', - 'SignedIdentifier', - 'SourceModifiedAccessConditions', - 'StaticWebsite', - 'StorageError', - 'StorageServiceProperties', - 'StorageServiceStats', - 'UserDelegationKey', - 'AccessTier', - 'AccessTierOptional', - 'AccessTierRequired', - 'AccountKind', - 'ArchiveStatus', - 'BlobExpiryOptions', - 'BlobType', - 'BlockListType', - 'CopyStatusType', - 'DeleteSnapshotsOptionType', - 'GeoReplicationStatusType', - 'LeaseDurationType', - 'LeaseStateType', - 'LeaseStatusType', - 'ListBlobsIncludeItem', - 'ListContainersIncludeType', - 'PathRenameMode', - 'PremiumPageBlobAccessTier', - 'PublicAccessType', - 'QueryFormatType', - 'RehydratePriority', - 'SequenceNumberActionType', - 'SkuName', - 'StorageErrorCode', + "AccessPolicy", + "AppendPositionAccessConditions", + "ArrowConfiguration", + "ArrowField", + "BlobFlatListSegment", + "BlobHTTPHeaders", + "BlobHierarchyListSegment", + "BlobItemInternal", + "BlobMetadata", + "BlobName", + "BlobPrefix", + "BlobPropertiesInternal", + "BlobTag", + "BlobTags", + "Block", + "BlockList", + "BlockLookupList", + "ClearRange", + "ContainerCpkScopeInfo", + "ContainerItem", + "ContainerProperties", + "CorsRule", + "CpkInfo", + "CpkScopeInfo", + "DelimitedTextConfiguration", + "FilterBlobItem", + "FilterBlobSegment", + "GeoReplication", + "JsonTextConfiguration", + "KeyInfo", + "LeaseAccessConditions", + "ListBlobsFlatSegmentResponse", + "ListBlobsHierarchySegmentResponse", + "ListContainersSegmentResponse", + "Logging", + "Metrics", + "ModifiedAccessConditions", + "PageList", + "PageRange", + "QueryFormat", + "QueryRequest", + "QuerySerialization", + "RetentionPolicy", + "SequenceNumberAccessConditions", + "SignedIdentifier", + "SourceModifiedAccessConditions", + "StaticWebsite", + "StorageError", + "StorageServiceProperties", + "StorageServiceStats", + "UserDelegationKey", + "AccessTier", + "AccessTierOptional", + "AccessTierRequired", + "AccountKind", + "ArchiveStatus", + "BlobCopySourceTags", + "BlobExpiryOptions", + "BlobImmutabilityPolicyMode", + "BlobType", + "BlockListType", + "CopyStatusType", + "DeleteSnapshotsOptionType", + "EncryptionAlgorithmType", + "FilterBlobsIncludeItem", + "GeoReplicationStatusType", + "LeaseDurationType", + "LeaseStateType", + "LeaseStatusType", + "ListBlobsIncludeItem", + "ListContainersIncludeType", + "PremiumPageBlobAccessTier", + "PublicAccessType", + "QueryFormatType", + "RehydratePriority", + "SequenceNumberActionType", + "SkuName", + "StorageErrorCode", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py index 2df7b1ad6219..12ccbf7312f9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py @@ -6,27 +6,12 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum, EnumMeta -from six import with_metaclass +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta -class _CaseInsensitiveEnumMeta(EnumMeta): - def __getitem__(self, name): - return super().__getitem__(name.upper()) - def __getattr__(cls, name): - """Return the enum member matching `name` - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - """ - try: - return cls._member_map_[name.upper()] - except KeyError: - raise AttributeError(name) - - -class AccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccessTier.""" P4 = "P4" P6 = "P6" @@ -42,8 +27,12 @@ class AccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HOT = "Hot" COOL = "Cool" ARCHIVE = "Archive" + PREMIUM = "Premium" + COLD = "Cold" + -class AccessTierOptional(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AccessTierOptional(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccessTierOptional.""" P4 = "P4" P6 = "P6" @@ -59,8 +48,11 @@ class AccessTierOptional(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HOT = "Hot" COOL = "Cool" ARCHIVE = "Archive" + COLD = "Cold" -class AccessTierRequired(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class AccessTierRequired(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccessTierRequired.""" P4 = "P4" P6 = "P6" @@ -76,8 +68,11 @@ class AccessTierRequired(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HOT = "Hot" COOL = "Cool" ARCHIVE = "Archive" + COLD = "Cold" + -class AccountKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AccountKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccountKind.""" STORAGE = "Storage" BLOB_STORAGE = "BlobStorage" @@ -85,56 +80,102 @@ class AccountKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FILE_STORAGE = "FileStorage" BLOCK_BLOB_STORAGE = "BlockBlobStorage" -class ArchiveStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class ArchiveStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ArchiveStatus.""" REHYDRATE_PENDING_TO_HOT = "rehydrate-pending-to-hot" REHYDRATE_PENDING_TO_COOL = "rehydrate-pending-to-cool" + REHYDRATE_PENDING_TO_COLD = "rehydrate-pending-to-cold" + + +class BlobCopySourceTags(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobCopySourceTags.""" + + REPLACE = "REPLACE" + COPY = "COPY" + -class BlobExpiryOptions(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class BlobExpiryOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobExpiryOptions.""" NEVER_EXPIRE = "NeverExpire" RELATIVE_TO_CREATION = "RelativeToCreation" RELATIVE_TO_NOW = "RelativeToNow" ABSOLUTE = "Absolute" -class BlobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobImmutabilityPolicyMode.""" + + MUTABLE = "Mutable" + UNLOCKED = "Unlocked" + LOCKED = "Locked" + + +class BlobType(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): + """BlobType.""" BLOCK_BLOB = "BlockBlob" PAGE_BLOB = "PageBlob" APPEND_BLOB = "AppendBlob" -class BlockListType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class BlockListType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlockListType.""" COMMITTED = "committed" UNCOMMITTED = "uncommitted" ALL = "all" -class CopyStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class CopyStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CopyStatusType.""" PENDING = "pending" SUCCESS = "success" ABORTED = "aborted" FAILED = "failed" -class DeleteSnapshotsOptionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class DeleteSnapshotsOptionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DeleteSnapshotsOptionType.""" INCLUDE = "include" ONLY = "only" -class GeoReplicationStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The status of the secondary location - """ + +class EncryptionAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """EncryptionAlgorithmType.""" + + NONE = "None" + AES256 = "AES256" + + +class FilterBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """FilterBlobsIncludeItem.""" + + NONE = "none" + VERSIONS = "versions" + + +class GeoReplicationStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of the secondary location.""" LIVE = "live" BOOTSTRAP = "bootstrap" UNAVAILABLE = "unavailable" -class LeaseDurationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class LeaseDurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LeaseDurationType.""" INFINITE = "infinite" FIXED = "fixed" -class LeaseStateType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class LeaseStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LeaseStateType.""" AVAILABLE = "available" LEASED = "leased" @@ -142,12 +183,16 @@ class LeaseStateType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): BREAKING = "breaking" BROKEN = "broken" -class LeaseStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class LeaseStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LeaseStatusType.""" LOCKED = "locked" UNLOCKED = "unlocked" -class ListBlobsIncludeItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class ListBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ListBlobsIncludeItem.""" COPY = "copy" DELETED = "deleted" @@ -156,18 +201,21 @@ class ListBlobsIncludeItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): UNCOMMITTEDBLOBS = "uncommittedblobs" VERSIONS = "versions" TAGS = "tags" + IMMUTABILITYPOLICY = "immutabilitypolicy" + LEGALHOLD = "legalhold" + DELETEDWITHVERSIONS = "deletedwithversions" -class ListContainersIncludeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class ListContainersIncludeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ListContainersIncludeType.""" METADATA = "metadata" DELETED = "deleted" + SYSTEM = "system" -class PathRenameMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - - LEGACY = "legacy" - POSIX = "posix" -class PremiumPageBlobAccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PremiumPageBlobAccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """PremiumPageBlobAccessTier.""" P4 = "P4" P6 = "P6" @@ -181,20 +229,24 @@ class PremiumPageBlobAccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, En P70 = "P70" P80 = "P80" -class 
PublicAccessType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class PublicAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """PublicAccessType.""" CONTAINER = "container" BLOB = "blob" -class QueryFormatType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The quick query format type. - """ + +class QueryFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The quick query format type.""" DELIMITED = "delimited" JSON = "json" ARROW = "arrow" + PARQUET = "parquet" -class RehydratePriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class RehydratePriority(str, Enum, metaclass=CaseInsensitiveEnumMeta): """If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. """ @@ -202,13 +254,17 @@ class RehydratePriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HIGH = "High" STANDARD = "Standard" -class SequenceNumberActionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class SequenceNumberActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SequenceNumberActionType.""" MAX = "max" UPDATE = "update" INCREMENT = "increment" -class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SkuName.""" STANDARD_LRS = "Standard_LRS" STANDARD_GRS = "Standard_GRS" @@ -216,9 +272,9 @@ class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STANDARD_ZRS = "Standard_ZRS" PREMIUM_LRS = "Premium_LRS" -class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Error codes returned by the service - """ + +class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Error codes returned by the service.""" ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" ACCOUNT_BEING_CREATED = "AccountBeingCreated" @@ -268,6 +324,7 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): BLOB_NOT_FOUND = "BlobNotFound" BLOB_OVERWRITTEN = "BlobOverwritten" BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" + BLOB_USES_CUSTOMER_SPECIFIED_ENCRYPTION = "BlobUsesCustomerSpecifiedEncryption" BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" BLOCK_LIST_TOO_LONG = "BlockListTooLong" CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" @@ -281,7 +338,7 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): COPY_ID_MISMATCH = "CopyIdMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" - INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEralierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" @@ -318,7 +375,7 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" - SNAPHOT_OPERATION_RATE_EXCEEDED = "SnaphotOperationRateExceeded" + SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" SNAPSHOTS_PRESENT = "SnapshotsPresent" SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" 
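Every enum in this file moves from the vendored six shim to the (str, Enum) pattern with azure.core's CaseInsensitiveEnumMeta, which keeps case-insensitive member lookup without depending on six. A minimal sketch of the behavior the new base classes provide, trimmed to two members for illustration:

    from enum import Enum
    from azure.core import CaseInsensitiveEnumMeta

    class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
        HOT = "Hot"
        COOL = "Cool"

    assert AccessTier["hot"] is AccessTier.HOT   # name lookup folds case
    assert AccessTier.hot is AccessTier.HOT      # attribute access does too
    assert AccessTier.HOT == "Hot"               # str mixin: members compare equal to their values

Note that only name-based lookup is case-insensitive; constructing by value, AccessTier("hot"), still requires the exact wire casing.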
SYSTEM_IN_USE = "SystemInUse" @@ -332,3 +389,4 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): AUTHORIZATION_PERMISSION_MISMATCH = "AuthorizationPermissionMismatch" AUTHORIZATION_SERVICE_MISMATCH = "AuthorizationServiceMismatch" AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch" + BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_models_py3.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_models_py3.py index b1339f08fbdc..cd88cb20487f 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_models_py3.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_models_py3.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. @@ -7,29 +8,37 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +import sys +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union -from azure.core.exceptions import HttpResponseError -import msrest.serialization +from .. import _serialization -from ._azure_blob_storage_enums import * +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from .. import models as _models +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class AccessPolicy(msrest.serialization.Model): + +class AccessPolicy(_serialization.Model): """An Access policy. - :param start: the date-time the policy is active. - :type start: str - :param expiry: the date-time the policy expires. - :type expiry: str - :param permission: the permissions for the acl policy. - :type permission: str + :ivar start: the date-time the policy is active. + :vartype start: str + :ivar expiry: the date-time the policy expires. + :vartype expiry: str + :ivar permission: the permissions for the acl policy. + :vartype permission: str """ _attribute_map = { - 'start': {'key': 'Start', 'type': 'str'}, - 'expiry': {'key': 'Expiry', 'type': 'str'}, - 'permission': {'key': 'Permission', 'type': 'str'}, + "start": {"key": "Start", "type": "str"}, + "expiry": {"key": "Expiry", "type": "str"}, + "permission": {"key": "Permission", "type": "str"}, } def __init__( @@ -38,104 +47,117 @@ def __init__( start: Optional[str] = None, expiry: Optional[str] = None, permission: Optional[str] = None, - **kwargs - ): - super(AccessPolicy, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword start: the date-time the policy is active. + :paramtype start: str + :keyword expiry: the date-time the policy expires. + :paramtype expiry: str + :keyword permission: the permissions for the acl policy. 
+ :paramtype permission: str + """ + super().__init__(**kwargs) self.start = start self.expiry = expiry self.permission = permission -class AppendPositionAccessConditions(msrest.serialization.Model): +class AppendPositionAccessConditions(_serialization.Model): """Parameter group. - :param max_size: Optional conditional header. The max length in bytes permitted for the append + :ivar max_size: Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :type max_size: long - :param append_position: Optional conditional header, used only for the Append Block operation. - A number indicating the byte offset to compare. Append Block will succeed only if the append + :vartype max_size: int + :ivar append_position: Optional conditional header, used only for the Append Block operation. A + number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). - :type append_position: long + :vartype append_position: int """ _attribute_map = { - 'max_size': {'key': 'maxSize', 'type': 'long'}, - 'append_position': {'key': 'appendPosition', 'type': 'long'}, - } - - def __init__( - self, - *, - max_size: Optional[int] = None, - append_position: Optional[int] = None, - **kwargs - ): - super(AppendPositionAccessConditions, self).__init__(**kwargs) + "max_size": {"key": "maxSize", "type": "int"}, + "append_position": {"key": "appendPosition", "type": "int"}, + } + + def __init__(self, *, max_size: Optional[int] = None, append_position: Optional[int] = None, **kwargs: Any) -> None: + """ + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :paramtype max_size: int + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). + :paramtype append_position: int + """ + super().__init__(**kwargs) self.max_size = max_size self.append_position = append_position -class ArrowConfiguration(msrest.serialization.Model): - """arrow configuration. +class ArrowConfiguration(_serialization.Model): + """Groups the settings used for formatting the response if the response should be Arrow formatted. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param schema: Required. - :type schema: list[~azure.storage.blob.models.ArrowField] + :ivar schema: Required. 
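AppendPositionAccessConditions also shows the mechanical typing change: the msrest "long" serialization rule becomes "int", since Python 3 has a single integer type. A sketch of building the parameter group (sizes illustrative; the 412 semantics are quoted from the docstring above):

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    conditions = models.AppendPositionAccessConditions(
        max_size=4 * 1024 * 1024,  # 412 Precondition Failed if the append would exceed 4 MiB
        append_position=0,         # 412 unless the block lands exactly at offset 0
    )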
+ :vartype schema: list[~azure.storage.blob.models.ArrowField] """ _validation = { - 'schema': {'required': True}, + "schema": {"required": True}, } _attribute_map = { - 'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'wrapped': True, 'itemsName': 'Field'}}, - } - _xml_map = { - 'name': 'ArrowConfiguration' - } - - def __init__( - self, - *, - schema: List["ArrowField"], - **kwargs - ): - super(ArrowConfiguration, self).__init__(**kwargs) + "schema": { + "key": "Schema", + "type": "[ArrowField]", + "xml": {"name": "Schema", "wrapped": True, "itemsName": "Field"}, + }, + } + _xml_map = {"name": "ArrowConfiguration"} + + def __init__(self, *, schema: List["_models.ArrowField"], **kwargs: Any) -> None: + """ + :keyword schema: Required. + :paramtype schema: list[~azure.storage.blob.models.ArrowField] + """ + super().__init__(**kwargs) self.schema = schema -class ArrowField(msrest.serialization.Model): - """field of an arrow schema. +class ArrowField(_serialization.Model): + """Groups settings regarding specific field of an arrow schema. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param type: Required. - :type type: str - :param name: - :type name: str - :param precision: - :type precision: int - :param scale: - :type scale: int + :ivar type: Required. + :vartype type: str + :ivar name: + :vartype name: str + :ivar precision: + :vartype precision: int + :ivar scale: + :vartype scale: int """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'Type', 'type': 'str'}, - 'name': {'key': 'Name', 'type': 'str'}, - 'precision': {'key': 'Precision', 'type': 'int'}, - 'scale': {'key': 'Scale', 'type': 'int'}, - } - _xml_map = { - 'name': 'Field' + "type": {"key": "Type", "type": "str"}, + "name": {"key": "Name", "type": "str"}, + "precision": {"key": "Precision", "type": "int"}, + "scale": {"key": "Scale", "type": "int"}, } + _xml_map = {"name": "Field"} def __init__( self, @@ -144,109 +166,120 @@ def __init__( name: Optional[str] = None, precision: Optional[int] = None, scale: Optional[int] = None, - **kwargs - ): - super(ArrowField, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword type: Required. + :paramtype type: str + :keyword name: + :paramtype name: str + :keyword precision: + :paramtype precision: int + :keyword scale: + :paramtype scale: int + """ + super().__init__(**kwargs) self.type = type self.name = name self.precision = precision self.scale = scale -class BlobFlatListSegment(msrest.serialization.Model): +class BlobFlatListSegment(_serialization.Model): """BlobFlatListSegment. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param blob_items: Required. - :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + :ivar blob_items: Required. 
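The xml metadata on _attribute_map entries drives the serialized shape: 'wrapped': True emits a wrapper element named by 'name', and 'itemsName' names each child. For ArrowConfiguration that implies the XML sketched in the comment below (inferred from the maps above, not captured from a live request):

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    cfg = models.ArrowConfiguration(schema=[
        models.ArrowField(type="int64", name="id"),
        models.ArrowField(type="decimal", name="price", precision=18, scale=2),
    ])
    # Implied XML:
    # <ArrowConfiguration>
    #   <Schema>
    #     <Field><Type>int64</Type><Name>id</Name></Field>
    #     <Field><Type>decimal</Type><Name>price</Name><Precision>18</Precision><Scale>2</Scale></Field>
    #   </Schema>
    # </ArrowConfiguration>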
+ :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] """ _validation = { - 'blob_items': {'required': True}, + "blob_items": {"required": True}, } _attribute_map = { - 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]'}, - } - _xml_map = { - 'name': 'Blobs' + "blob_items": {"key": "BlobItems", "type": "[BlobItemInternal]", "xml": {"itemsName": "Blob"}}, } + _xml_map = {"name": "Blobs"} - def __init__( - self, - *, - blob_items: List["BlobItemInternal"], - **kwargs - ): - super(BlobFlatListSegment, self).__init__(**kwargs) + def __init__(self, *, blob_items: List["_models.BlobItemInternal"], **kwargs: Any) -> None: + """ + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super().__init__(**kwargs) self.blob_items = blob_items -class BlobHierarchyListSegment(msrest.serialization.Model): +class BlobHierarchyListSegment(_serialization.Model): """BlobHierarchyListSegment. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param blob_prefixes: - :type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] - :param blob_items: Required. - :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + :ivar blob_prefixes: + :vartype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :ivar blob_items: Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] """ _validation = { - 'blob_items': {'required': True}, + "blob_items": {"required": True}, } _attribute_map = { - 'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix'}}, - 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}}, - } - _xml_map = { - 'name': 'Blobs' + "blob_prefixes": {"key": "BlobPrefixes", "type": "[BlobPrefix]", "xml": {"name": "BlobPrefix"}}, + "blob_items": {"key": "BlobItems", "type": "[BlobItemInternal]", "xml": {"name": "Blob", "itemsName": "Blob"}}, } + _xml_map = {"name": "Blobs"} def __init__( self, *, - blob_items: List["BlobItemInternal"], - blob_prefixes: Optional[List["BlobPrefix"]] = None, - **kwargs - ): - super(BlobHierarchyListSegment, self).__init__(**kwargs) + blob_items: List["_models.BlobItemInternal"], + blob_prefixes: Optional[List["_models.BlobPrefix"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword blob_prefixes: + :paramtype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super().__init__(**kwargs) self.blob_prefixes = blob_prefixes self.blob_items = blob_items -class BlobHTTPHeaders(msrest.serialization.Model): +class BlobHTTPHeaders(_serialization.Model): """Parameter group. - :param blob_cache_control: Optional. Sets the blob's cache control. If specified, this property + :ivar blob_cache_control: Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. - :type blob_cache_control: str - :param blob_content_type: Optional. Sets the blob's content type. If specified, this property - is stored with the blob and returned with a read request. - :type blob_content_type: str - :param blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not + :vartype blob_cache_control: str + :ivar blob_content_type: Optional. Sets the blob's content type. 
If specified, this property is + stored with the blob and returned with a read request. + :vartype blob_content_type: str + :ivar blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. - :type blob_content_md5: bytearray - :param blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + :vartype blob_content_md5: bytes + :ivar blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. - :type blob_content_encoding: str - :param blob_content_language: Optional. Set the blob's content language. If specified, this + :vartype blob_content_encoding: str + :ivar blob_content_language: Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. - :type blob_content_language: str - :param blob_content_disposition: Optional. Sets the blob's Content-Disposition header. - :type blob_content_disposition: str + :vartype blob_content_language: str + :ivar blob_content_disposition: Optional. Sets the blob's Content-Disposition header. + :vartype blob_content_disposition: str """ _attribute_map = { - 'blob_cache_control': {'key': 'blobCacheControl', 'type': 'str'}, - 'blob_content_type': {'key': 'blobContentType', 'type': 'str'}, - 'blob_content_md5': {'key': 'blobContentMD5', 'type': 'bytearray'}, - 'blob_content_encoding': {'key': 'blobContentEncoding', 'type': 'str'}, - 'blob_content_language': {'key': 'blobContentLanguage', 'type': 'str'}, - 'blob_content_disposition': {'key': 'blobContentDisposition', 'type': 'str'}, + "blob_cache_control": {"key": "blobCacheControl", "type": "str"}, + "blob_content_type": {"key": "blobContentType", "type": "str"}, + "blob_content_md5": {"key": "blobContentMD5", "type": "bytearray"}, + "blob_content_encoding": {"key": "blobContentEncoding", "type": "str"}, + "blob_content_language": {"key": "blobContentLanguage", "type": "str"}, + "blob_content_disposition": {"key": "blobContentDisposition", "type": "str"}, } def __init__( @@ -254,13 +287,32 @@ def __init__( *, blob_cache_control: Optional[str] = None, blob_content_type: Optional[str] = None, - blob_content_md5: Optional[bytearray] = None, + blob_content_md5: Optional[bytes] = None, blob_content_encoding: Optional[str] = None, blob_content_language: Optional[str] = None, blob_content_disposition: Optional[str] = None, - **kwargs - ): - super(BlobHTTPHeaders, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. + :paramtype blob_content_type: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + :paramtype blob_content_md5: bytes + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. 
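blob_content_md5 is retyped from bytearray to bytes; the _attribute_map keeps the "bytearray" rule, so the value should still be the raw digest (the serializer base64-encodes it for the x-ms-blob-content-md5 header). A sketch, with the digest computed locally via hashlib:

    import hashlib

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    body = b"checkpoint payload"
    headers = models.BlobHTTPHeaders(
        blob_content_type="application/octet-stream",
        blob_content_md5=hashlib.md5(body).digest(),  # raw 16-byte digest, not a hex string
    )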
If specified, this + property is stored with the blob and returned with a read request. + :paramtype blob_content_language: str + :keyword blob_content_disposition: Optional. Sets the blob's Content-Disposition header. + :paramtype blob_content_disposition: str + """ + super().__init__(**kwargs) self.blob_cache_control = blob_cache_control self.blob_content_type = blob_content_type self.blob_content_md5 = blob_content_md5 @@ -269,68 +321,92 @@ def __init__( self.blob_content_disposition = blob_content_disposition -class BlobItemInternal(msrest.serialization.Model): +class BlobItemInternal(_serialization.Model): """An Azure Storage blob. - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param deleted: Required. - :type deleted: bool - :param snapshot: Required. - :type snapshot: str - :param version_id: - :type version_id: str - :param is_current_version: - :type is_current_version: bool - :param properties: Required. Properties of a blob. - :type properties: ~azure.storage.blob.models.BlobPropertiesInternal - :param metadata: - :type metadata: ~azure.storage.blob.models.BlobMetadata - :param blob_tags: Blob tags. - :type blob_tags: ~azure.storage.blob.models.BlobTags - :param object_replication_metadata: Dictionary of :code:``. - :type object_replication_metadata: dict[str, str] + All required parameters must be populated in order to send to server. + + :ivar name: Required. + :vartype name: ~azure.storage.blob.models.BlobName + :ivar deleted: Required. + :vartype deleted: bool + :ivar snapshot: Required. + :vartype snapshot: str + :ivar version_id: + :vartype version_id: str + :ivar is_current_version: + :vartype is_current_version: bool + :ivar properties: Properties of a blob. Required. + :vartype properties: ~azure.storage.blob.models.BlobPropertiesInternal + :ivar metadata: + :vartype metadata: ~azure.storage.blob.models.BlobMetadata + :ivar blob_tags: Blob tags. + :vartype blob_tags: ~azure.storage.blob.models.BlobTags + :ivar has_versions_only: + :vartype has_versions_only: bool + :ivar object_replication_metadata: Dictionary of :code:``. 
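Cross-references between models now go through the _models alias imported under TYPE_CHECKING (see the top of this file), so annotations like List["_models.BlobItemInternal"] resolve for type checkers without a runtime import cycle. The pattern reduced to a schematic skeleton (mypackage and Segment are placeholders, not the generated code):

    from typing import List, TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated by type checkers only; never executed at runtime.
        from mypackage import models as _models

    class Segment:
        def __init__(self, *, blob_items: List["_models.BlobItemInternal"], **kwargs) -> None:
            self.blob_items = blob_items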
+ :vartype object_replication_metadata: dict[str, str] """ _validation = { - 'name': {'required': True}, - 'deleted': {'required': True}, - 'snapshot': {'required': True}, - 'properties': {'required': True}, + "name": {"required": True}, + "deleted": {"required": True}, + "snapshot": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'deleted': {'key': 'Deleted', 'type': 'bool'}, - 'snapshot': {'key': 'Snapshot', 'type': 'str'}, - 'version_id': {'key': 'VersionId', 'type': 'str'}, - 'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool'}, - 'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal'}, - 'metadata': {'key': 'Metadata', 'type': 'BlobMetadata'}, - 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags'}, - 'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}'}, - } - _xml_map = { - 'name': 'Blob' - } + "name": {"key": "Name", "type": "BlobName"}, + "deleted": {"key": "Deleted", "type": "bool"}, + "snapshot": {"key": "Snapshot", "type": "str"}, + "version_id": {"key": "VersionId", "type": "str"}, + "is_current_version": {"key": "IsCurrentVersion", "type": "bool"}, + "properties": {"key": "Properties", "type": "BlobPropertiesInternal"}, + "metadata": {"key": "Metadata", "type": "BlobMetadata"}, + "blob_tags": {"key": "BlobTags", "type": "BlobTags"}, + "has_versions_only": {"key": "HasVersionsOnly", "type": "bool"}, + "object_replication_metadata": {"key": "OrMetadata", "type": "{str}"}, + } + _xml_map = {"name": "Blob"} def __init__( self, *, - name: str, + name: "_models.BlobName", deleted: bool, snapshot: str, - properties: "BlobPropertiesInternal", + properties: "_models.BlobPropertiesInternal", version_id: Optional[str] = None, is_current_version: Optional[bool] = None, - metadata: Optional["BlobMetadata"] = None, - blob_tags: Optional["BlobTags"] = None, + metadata: Optional["_models.BlobMetadata"] = None, + blob_tags: Optional["_models.BlobTags"] = None, + has_versions_only: Optional[bool] = None, object_replication_metadata: Optional[Dict[str, str]] = None, - **kwargs - ): - super(BlobItemInternal, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword name: Required. + :paramtype name: ~azure.storage.blob.models.BlobName + :keyword deleted: Required. + :paramtype deleted: bool + :keyword snapshot: Required. + :paramtype snapshot: str + :keyword version_id: + :paramtype version_id: str + :keyword is_current_version: + :paramtype is_current_version: bool + :keyword properties: Properties of a blob. Required. + :paramtype properties: ~azure.storage.blob.models.BlobPropertiesInternal + :keyword metadata: + :paramtype metadata: ~azure.storage.blob.models.BlobMetadata + :keyword blob_tags: Blob tags. + :paramtype blob_tags: ~azure.storage.blob.models.BlobTags + :keyword has_versions_only: + :paramtype has_versions_only: bool + :keyword object_replication_metadata: Dictionary of :code:``. + :paramtype object_replication_metadata: dict[str, str] + """ + super().__init__(**kwargs) self.name = name self.deleted = deleted self.snapshot = snapshot @@ -339,201 +415,235 @@ def __init__( self.properties = properties self.metadata = metadata self.blob_tags = blob_tags + self.has_versions_only = has_versions_only self.object_replication_metadata = object_replication_metadata -class BlobMetadata(msrest.serialization.Model): +class BlobMetadata(_serialization.Model): """BlobMetadata. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, str] - :param encrypted: - :type encrypted: str + :vartype additional_properties: dict[str, str] + :ivar encrypted: + :vartype encrypted: str """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{str}'}, - 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'attr': True}}, - } - _xml_map = { - 'name': 'Metadata' + "additional_properties": {"key": "", "type": "{str}"}, + "encrypted": {"key": "Encrypted", "type": "str", "xml": {"attr": True}}, } + _xml_map = {"name": "Metadata"} def __init__( - self, - *, - additional_properties: Optional[Dict[str, str]] = None, - encrypted: Optional[str] = None, - **kwargs - ): - super(BlobMetadata, self).__init__(**kwargs) + self, *, additional_properties: Optional[Dict[str, str]] = None, encrypted: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, str] + :keyword encrypted: + :paramtype encrypted: str + """ + super().__init__(**kwargs) self.additional_properties = additional_properties self.encrypted = encrypted -class BlobPrefix(msrest.serialization.Model): +class BlobName(_serialization.Model): + """BlobName. + + :ivar encoded: Indicates if the blob name is encoded. + :vartype encoded: bool + :ivar content: The name of the blob. + :vartype content: str + """ + + _attribute_map = { + "encoded": {"key": "Encoded", "type": "bool", "xml": {"name": "Encoded", "attr": True}}, + "content": {"key": "content", "type": "str", "xml": {"text": True}}, + } + + def __init__(self, *, encoded: Optional[bool] = None, content: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword encoded: Indicates if the blob name is encoded. + :paramtype encoded: bool + :keyword content: The name of the blob. + :paramtype content: str + """ + super().__init__(**kwargs) + self.encoded = encoded + self.content = content + + +class BlobPrefix(_serialization.Model): """BlobPrefix. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param name: Required. - :type name: str + :ivar name: Required. + :vartype name: ~azure.storage.blob.models.BlobName """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, + "name": {"key": "Name", "type": "BlobName"}, } - def __init__( - self, - *, - name: str, - **kwargs - ): - super(BlobPrefix, self).__init__(**kwargs) + def __init__(self, *, name: "_models.BlobName", **kwargs: Any) -> None: + """ + :keyword name: Required. + :paramtype name: ~azure.storage.blob.models.BlobName + """ + super().__init__(**kwargs) self.name = name -class BlobPropertiesInternal(msrest.serialization.Model): +class BlobPropertiesInternal(_serialization.Model): # pylint: disable=too-many-instance-attributes """Properties of a blob. - All required parameters must be populated in order to send to Azure. - - :param creation_time: - :type creation_time: ~datetime.datetime - :param last_modified: Required. - :type last_modified: ~datetime.datetime - :param etag: Required. - :type etag: str - :param content_length: Size in bytes. 
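BlobName is new: listing entries no longer carry a plain-str name but a small model whose text content is the name ('xml': {'text': True}) and whose Encoded attribute signals percent-encoding, which lets the service return names containing characters that are invalid in XML. A consumer-side sketch; the unquote step is an assumption about how an encoded name would be decoded:

    from urllib.parse import unquote

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    name = models.BlobName(encoded=True, content="logs%2F2024%2Fday1.avro")
    text = unquote(name.content) if name.encoded else name.content
    assert text == "logs/2024/day1.avro"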
- :type content_length: long - :param content_type: - :type content_type: str - :param content_encoding: - :type content_encoding: str - :param content_language: - :type content_language: str - :param content_md5: - :type content_md5: bytearray - :param content_disposition: - :type content_disposition: str - :param cache_control: - :type cache_control: str - :param blob_sequence_number: - :type blob_sequence_number: long - :param blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". - :type blob_type: str or ~azure.storage.blob.models.BlobType - :param lease_status: Possible values include: "locked", "unlocked". - :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + All required parameters must be populated in order to send to server. + + :ivar creation_time: + :vartype creation_time: ~datetime.datetime + :ivar last_modified: Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: Required. + :vartype etag: str + :ivar content_length: Size in bytes. + :vartype content_length: int + :ivar content_type: + :vartype content_type: str + :ivar content_encoding: + :vartype content_encoding: str + :ivar content_language: + :vartype content_language: str + :ivar content_md5: + :vartype content_md5: bytes + :ivar content_disposition: + :vartype content_disposition: str + :ivar cache_control: + :vartype cache_control: str + :ivar blob_sequence_number: + :vartype blob_sequence_number: int + :ivar blob_type: Known values are: "BlockBlob", "PageBlob", and "AppendBlob". + :vartype blob_type: str or ~azure.storage.blob.models.BlobType + :ivar lease_status: Known values are: "locked" and "unlocked". + :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :ivar lease_state: Known values are: "available", "leased", "expired", "breaking", and "broken". - :type lease_state: str or ~azure.storage.blob.models.LeaseStateType - :param lease_duration: Possible values include: "infinite", "fixed". - :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :param copy_id: - :type copy_id: str - :param copy_status: Possible values include: "pending", "success", "aborted", "failed". - :type copy_status: str or ~azure.storage.blob.models.CopyStatusType - :param copy_source: - :type copy_source: str - :param copy_progress: - :type copy_progress: str - :param copy_completion_time: - :type copy_completion_time: ~datetime.datetime - :param copy_status_description: - :type copy_status_description: str - :param server_encrypted: - :type server_encrypted: bool - :param incremental_copy: - :type incremental_copy: bool - :param destination_snapshot: - :type destination_snapshot: str - :param deleted_time: - :type deleted_time: ~datetime.datetime - :param remaining_retention_days: - :type remaining_retention_days: int - :param access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", - "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". - :type access_tier: str or ~azure.storage.blob.models.AccessTier - :param access_tier_inferred: - :type access_tier_inferred: bool - :param archive_status: Possible values include: "rehydrate-pending-to-hot", "rehydrate- - pending-to-cool". - :type archive_status: str or ~azure.storage.blob.models.ArchiveStatus - :param customer_provided_key_sha256: - :type customer_provided_key_sha256: str - :param encryption_scope: The name of the encryption scope under which the blob is encrypted. 
- :type encryption_scope: str - :param access_tier_change_time: - :type access_tier_change_time: ~datetime.datetime - :param tag_count: - :type tag_count: int - :param expires_on: - :type expires_on: ~datetime.datetime - :param is_sealed: - :type is_sealed: bool - :param rehydrate_priority: If an object is in rehydrate pending state then this header is - returned with priority of rehydrate. Valid values are High and Standard. Possible values - include: "High", "Standard". - :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :param last_accessed_on: - :type last_accessed_on: ~datetime.datetime + :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :ivar lease_duration: Known values are: "infinite" and "fixed". + :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :ivar copy_id: + :vartype copy_id: str + :ivar copy_status: Known values are: "pending", "success", "aborted", and "failed". + :vartype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :ivar copy_source: + :vartype copy_source: str + :ivar copy_progress: + :vartype copy_progress: str + :ivar copy_completion_time: + :vartype copy_completion_time: ~datetime.datetime + :ivar copy_status_description: + :vartype copy_status_description: str + :ivar server_encrypted: + :vartype server_encrypted: bool + :ivar incremental_copy: + :vartype incremental_copy: bool + :ivar destination_snapshot: + :vartype destination_snapshot: str + :ivar deleted_time: + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: + :vartype remaining_retention_days: int + :ivar access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", + "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". + :vartype access_tier: str or ~azure.storage.blob.models.AccessTier + :ivar access_tier_inferred: + :vartype access_tier_inferred: bool + :ivar archive_status: Known values are: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". + :vartype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :ivar customer_provided_key_sha256: + :vartype customer_provided_key_sha256: str + :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. + :vartype encryption_scope: str + :ivar access_tier_change_time: + :vartype access_tier_change_time: ~datetime.datetime + :ivar tag_count: + :vartype tag_count: int + :ivar expires_on: + :vartype expires_on: ~datetime.datetime + :ivar is_sealed: + :vartype is_sealed: bool + :ivar rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Known values are: + "High" and "Standard". + :vartype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :ivar last_accessed_on: + :vartype last_accessed_on: ~datetime.datetime + :ivar immutability_policy_expires_on: + :vartype immutability_policy_expires_on: ~datetime.datetime + :ivar immutability_policy_mode: Known values are: "Mutable", "Unlocked", and "Locked". 
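The three immutability additions travel together: an expiry timestamp mapped to ImmutabilityPolicyUntilDate, a mode drawn from the new BlobImmutabilityPolicyMode enum, and a legal-hold flag. A construction sketch with only the two required fields plus the new ones (values illustrative):

    import datetime

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    props = models.BlobPropertiesInternal(
        last_modified=datetime.datetime(2024, 6, 1, tzinfo=datetime.timezone.utc),
        etag="0x8DCA1B2C3D4E5F6",
        immutability_policy_expires_on=datetime.datetime(2025, 6, 1, tzinfo=datetime.timezone.utc),
        immutability_policy_mode=models.BlobImmutabilityPolicyMode.UNLOCKED,
        legal_hold=False,
    )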
+ :vartype immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :ivar legal_hold: + :vartype legal_hold: bool """ _validation = { - 'last_modified': {'required': True}, - 'etag': {'required': True}, + "last_modified": {"required": True}, + "etag": {"required": True}, } _attribute_map = { - 'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123'}, - 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, - 'etag': {'key': 'Etag', 'type': 'str'}, - 'content_length': {'key': 'Content-Length', 'type': 'long'}, - 'content_type': {'key': 'Content-Type', 'type': 'str'}, - 'content_encoding': {'key': 'Content-Encoding', 'type': 'str'}, - 'content_language': {'key': 'Content-Language', 'type': 'str'}, - 'content_md5': {'key': 'Content-MD5', 'type': 'bytearray'}, - 'content_disposition': {'key': 'Content-Disposition', 'type': 'str'}, - 'cache_control': {'key': 'Cache-Control', 'type': 'str'}, - 'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long'}, - 'blob_type': {'key': 'BlobType', 'type': 'str'}, - 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, - 'lease_state': {'key': 'LeaseState', 'type': 'str'}, - 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, - 'copy_id': {'key': 'CopyId', 'type': 'str'}, - 'copy_status': {'key': 'CopyStatus', 'type': 'str'}, - 'copy_source': {'key': 'CopySource', 'type': 'str'}, - 'copy_progress': {'key': 'CopyProgress', 'type': 'str'}, - 'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123'}, - 'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str'}, - 'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool'}, - 'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool'}, - 'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str'}, - 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, - 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, - 'access_tier': {'key': 'AccessTier', 'type': 'str'}, - 'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool'}, - 'archive_status': {'key': 'ArchiveStatus', 'type': 'str'}, - 'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str'}, - 'encryption_scope': {'key': 'EncryptionScope', 'type': 'str'}, - 'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123'}, - 'tag_count': {'key': 'TagCount', 'type': 'int'}, - 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123'}, - 'is_sealed': {'key': 'Sealed', 'type': 'bool'}, - 'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str'}, - 'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123'}, - } - _xml_map = { - 'name': 'Properties' - } - - def __init__( + "creation_time": {"key": "Creation-Time", "type": "rfc-1123"}, + "last_modified": {"key": "Last-Modified", "type": "rfc-1123"}, + "etag": {"key": "Etag", "type": "str"}, + "content_length": {"key": "Content-Length", "type": "int"}, + "content_type": {"key": "Content-Type", "type": "str"}, + "content_encoding": {"key": "Content-Encoding", "type": "str"}, + "content_language": {"key": "Content-Language", "type": "str"}, + "content_md5": {"key": "Content-MD5", "type": "bytearray"}, + "content_disposition": {"key": "Content-Disposition", "type": "str"}, + "cache_control": {"key": "Cache-Control", "type": "str"}, + "blob_sequence_number": {"key": "x-ms-blob-sequence-number", "type": "int"}, + "blob_type": {"key": "BlobType", "type": "str"}, + "lease_status": {"key": "LeaseStatus", "type": 
"str"}, + "lease_state": {"key": "LeaseState", "type": "str"}, + "lease_duration": {"key": "LeaseDuration", "type": "str"}, + "copy_id": {"key": "CopyId", "type": "str"}, + "copy_status": {"key": "CopyStatus", "type": "str"}, + "copy_source": {"key": "CopySource", "type": "str"}, + "copy_progress": {"key": "CopyProgress", "type": "str"}, + "copy_completion_time": {"key": "CopyCompletionTime", "type": "rfc-1123"}, + "copy_status_description": {"key": "CopyStatusDescription", "type": "str"}, + "server_encrypted": {"key": "ServerEncrypted", "type": "bool"}, + "incremental_copy": {"key": "IncrementalCopy", "type": "bool"}, + "destination_snapshot": {"key": "DestinationSnapshot", "type": "str"}, + "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"}, + "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"}, + "access_tier": {"key": "AccessTier", "type": "str"}, + "access_tier_inferred": {"key": "AccessTierInferred", "type": "bool"}, + "archive_status": {"key": "ArchiveStatus", "type": "str"}, + "customer_provided_key_sha256": {"key": "CustomerProvidedKeySha256", "type": "str"}, + "encryption_scope": {"key": "EncryptionScope", "type": "str"}, + "access_tier_change_time": {"key": "AccessTierChangeTime", "type": "rfc-1123"}, + "tag_count": {"key": "TagCount", "type": "int"}, + "expires_on": {"key": "Expiry-Time", "type": "rfc-1123"}, + "is_sealed": {"key": "Sealed", "type": "bool"}, + "rehydrate_priority": {"key": "RehydratePriority", "type": "str"}, + "last_accessed_on": {"key": "LastAccessTime", "type": "rfc-1123"}, + "immutability_policy_expires_on": {"key": "ImmutabilityPolicyUntilDate", "type": "rfc-1123"}, + "immutability_policy_mode": {"key": "ImmutabilityPolicyMode", "type": "str"}, + "legal_hold": {"key": "LegalHold", "type": "bool"}, + } + _xml_map = {"name": "Properties"} + + def __init__( # pylint: disable=too-many-locals self, *, last_modified: datetime.datetime, @@ -543,16 +653,16 @@ def __init__( content_type: Optional[str] = None, content_encoding: Optional[str] = None, content_language: Optional[str] = None, - content_md5: Optional[bytearray] = None, + content_md5: Optional[bytes] = None, content_disposition: Optional[str] = None, cache_control: Optional[str] = None, blob_sequence_number: Optional[int] = None, - blob_type: Optional[Union[str, "BlobType"]] = None, - lease_status: Optional[Union[str, "LeaseStatusType"]] = None, - lease_state: Optional[Union[str, "LeaseStateType"]] = None, - lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, + blob_type: Optional[Union[str, "_models.BlobType"]] = None, + lease_status: Optional[Union[str, "_models.LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "_models.LeaseStateType"]] = None, + lease_duration: Optional[Union[str, "_models.LeaseDurationType"]] = None, copy_id: Optional[str] = None, - copy_status: Optional[Union[str, "CopyStatusType"]] = None, + copy_status: Optional[Union[str, "_models.CopyStatusType"]] = None, copy_source: Optional[str] = None, copy_progress: Optional[str] = None, copy_completion_time: Optional[datetime.datetime] = None, @@ -562,20 +672,111 @@ def __init__( destination_snapshot: Optional[str] = None, deleted_time: Optional[datetime.datetime] = None, remaining_retention_days: Optional[int] = None, - access_tier: Optional[Union[str, "AccessTier"]] = None, + access_tier: Optional[Union[str, "_models.AccessTier"]] = None, access_tier_inferred: Optional[bool] = None, - archive_status: Optional[Union[str, "ArchiveStatus"]] = None, + archive_status: 
Optional[Union[str, "_models.ArchiveStatus"]] = None, customer_provided_key_sha256: Optional[str] = None, encryption_scope: Optional[str] = None, access_tier_change_time: Optional[datetime.datetime] = None, tag_count: Optional[int] = None, expires_on: Optional[datetime.datetime] = None, is_sealed: Optional[bool] = None, - rehydrate_priority: Optional[Union[str, "RehydratePriority"]] = None, + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, last_accessed_on: Optional[datetime.datetime] = None, - **kwargs - ): - super(BlobPropertiesInternal, self).__init__(**kwargs) + immutability_policy_expires_on: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword creation_time: + :paramtype creation_time: ~datetime.datetime + :keyword last_modified: Required. + :paramtype last_modified: ~datetime.datetime + :keyword etag: Required. + :paramtype etag: str + :keyword content_length: Size in bytes. + :paramtype content_length: int + :keyword content_type: + :paramtype content_type: str + :keyword content_encoding: + :paramtype content_encoding: str + :keyword content_language: + :paramtype content_language: str + :keyword content_md5: + :paramtype content_md5: bytes + :keyword content_disposition: + :paramtype content_disposition: str + :keyword cache_control: + :paramtype cache_control: str + :keyword blob_sequence_number: + :paramtype blob_sequence_number: int + :keyword blob_type: Known values are: "BlockBlob", "PageBlob", and "AppendBlob". + :paramtype blob_type: str or ~azure.storage.blob.models.BlobType + :keyword lease_status: Known values are: "locked" and "unlocked". + :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :keyword lease_state: Known values are: "available", "leased", "expired", "breaking", and + "broken". + :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :keyword lease_duration: Known values are: "infinite" and "fixed". + :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :keyword copy_id: + :paramtype copy_id: str + :keyword copy_status: Known values are: "pending", "success", "aborted", and "failed". + :paramtype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :keyword copy_source: + :paramtype copy_source: str + :keyword copy_progress: + :paramtype copy_progress: str + :keyword copy_completion_time: + :paramtype copy_completion_time: ~datetime.datetime + :keyword copy_status_description: + :paramtype copy_status_description: str + :keyword server_encrypted: + :paramtype server_encrypted: bool + :keyword incremental_copy: + :paramtype incremental_copy: bool + :keyword destination_snapshot: + :paramtype destination_snapshot: str + :keyword deleted_time: + :paramtype deleted_time: ~datetime.datetime + :keyword remaining_retention_days: + :paramtype remaining_retention_days: int + :keyword access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", + "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". + :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier + :keyword access_tier_inferred: + :paramtype access_tier_inferred: bool + :keyword archive_status: Known values are: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". 
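Enum-typed fields keep the Union[str, enum] shape, so existing callers that pass plain strings keep working; because the enums mix in str, the two spellings are also interchangeable in comparisons. For example:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    tier_a = models.AccessTier.COOL   # enum member
    tier_b = "Cool"                   # plain string with the same wire value
    assert tier_a == tier_b           # str-based enums compare equal to their values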
+ :paramtype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :keyword customer_provided_key_sha256: + :paramtype customer_provided_key_sha256: str + :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. + :paramtype encryption_scope: str + :keyword access_tier_change_time: + :paramtype access_tier_change_time: ~datetime.datetime + :keyword tag_count: + :paramtype tag_count: int + :keyword expires_on: + :paramtype expires_on: ~datetime.datetime + :keyword is_sealed: + :paramtype is_sealed: bool + :keyword rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Known values are: + "High" and "Standard". + :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :keyword last_accessed_on: + :paramtype last_accessed_on: ~datetime.datetime + :keyword immutability_policy_expires_on: + :paramtype immutability_policy_expires_on: ~datetime.datetime + :keyword immutability_policy_mode: Known values are: "Mutable", "Unlocked", and "Locked". + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: + :paramtype legal_hold: bool + """ + super().__init__(**kwargs) self.creation_time = creation_time self.last_modified = last_modified self.etag = etag @@ -613,152 +814,158 @@ def __init__( self.is_sealed = is_sealed self.rehydrate_priority = rehydrate_priority self.last_accessed_on = last_accessed_on + self.immutability_policy_expires_on = immutability_policy_expires_on + self.immutability_policy_mode = immutability_policy_mode + self.legal_hold = legal_hold -class BlobTag(msrest.serialization.Model): +class BlobTag(_serialization.Model): """BlobTag. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param key: Required. - :type key: str - :param value: Required. - :type value: str + :ivar key: Required. + :vartype key: str + :ivar value: Required. + :vartype value: str """ _validation = { - 'key': {'required': True}, - 'value': {'required': True}, + "key": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'key': {'key': 'Key', 'type': 'str'}, - 'value': {'key': 'Value', 'type': 'str'}, - } - _xml_map = { - 'name': 'Tag' - } - - def __init__( - self, - *, - key: str, - value: str, - **kwargs - ): - super(BlobTag, self).__init__(**kwargs) + "key": {"key": "Key", "type": "str"}, + "value": {"key": "Value", "type": "str"}, + } + _xml_map = {"name": "Tag"} + + def __init__(self, *, key: str, value: str, **kwargs: Any) -> None: + """ + :keyword key: Required. + :paramtype key: str + :keyword value: Required. + :paramtype value: str + """ + super().__init__(**kwargs) self.key = key self.value = value -class BlobTags(msrest.serialization.Model): +class BlobTags(_serialization.Model): """Blob tags. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param blob_tag_set: Required. - :type blob_tag_set: list[~azure.storage.blob.models.BlobTag] + :ivar blob_tag_set: Required. 
+ :vartype blob_tag_set: list[~azure.storage.blob.models.BlobTag] """ _validation = { - 'blob_tag_set': {'required': True}, + "blob_tag_set": {"required": True}, } _attribute_map = { - 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, - } - _xml_map = { - 'name': 'Tags' - } - - def __init__( - self, - *, - blob_tag_set: List["BlobTag"], - **kwargs - ): - super(BlobTags, self).__init__(**kwargs) + "blob_tag_set": { + "key": "BlobTagSet", + "type": "[BlobTag]", + "xml": {"name": "TagSet", "wrapped": True, "itemsName": "Tag"}, + }, + } + _xml_map = {"name": "Tags"} + + def __init__(self, *, blob_tag_set: List["_models.BlobTag"], **kwargs: Any) -> None: + """ + :keyword blob_tag_set: Required. + :paramtype blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + super().__init__(**kwargs) self.blob_tag_set = blob_tag_set -class Block(msrest.serialization.Model): +class Block(_serialization.Model): """Represents a single block in a block blob. It describes the block's ID and size. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param name: Required. The base64 encoded block ID. - :type name: str - :param size: Required. The block size in bytes. - :type size: int + :ivar name: The base64 encoded block ID. Required. + :vartype name: str + :ivar size: The block size in bytes. Required. + :vartype size: int """ _validation = { - 'name': {'required': True}, - 'size': {'required': True}, + "name": {"required": True}, + "size": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'size': {'key': 'Size', 'type': 'int'}, - } - - def __init__( - self, - *, - name: str, - size: int, - **kwargs - ): - super(Block, self).__init__(**kwargs) + "name": {"key": "Name", "type": "str"}, + "size": {"key": "Size", "type": "int"}, + } + + def __init__(self, *, name: str, size: int, **kwargs: Any) -> None: + """ + :keyword name: The base64 encoded block ID. Required. + :paramtype name: str + :keyword size: The block size in bytes. Required. + :paramtype size: int + """ + super().__init__(**kwargs) self.name = name self.size = size -class BlockList(msrest.serialization.Model): +class BlockList(_serialization.Model): """BlockList. 
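BlobTags nests its list under a wrapper element: 'name': 'TagSet' with 'wrapped': True and itemsName 'Tag' in the map above yield the Set Blob Tags request body sketched in the comment (XML inferred from the maps, values illustrative):

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    tags = models.BlobTags(blob_tag_set=[
        models.BlobTag(key="env", value="prod"),
        models.BlobTag(key="owner", value="eventhub"),
    ])
    # Implied XML:
    # <Tags><TagSet><Tag><Key>env</Key><Value>prod</Value></Tag>
    #               <Tag><Key>owner</Key><Value>eventhub</Value></Tag></TagSet></Tags>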
- :param committed_blocks: - :type committed_blocks: list[~azure.storage.blob.models.Block] - :param uncommitted_blocks: - :type uncommitted_blocks: list[~azure.storage.blob.models.Block] + :ivar committed_blocks: + :vartype committed_blocks: list[~azure.storage.blob.models.Block] + :ivar uncommitted_blocks: + :vartype uncommitted_blocks: list[~azure.storage.blob.models.Block] """ _attribute_map = { - 'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, - 'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + "committed_blocks": {"key": "CommittedBlocks", "type": "[Block]", "xml": {"wrapped": True}}, + "uncommitted_blocks": {"key": "UncommittedBlocks", "type": "[Block]", "xml": {"wrapped": True}}, } def __init__( self, *, - committed_blocks: Optional[List["Block"]] = None, - uncommitted_blocks: Optional[List["Block"]] = None, - **kwargs - ): - super(BlockList, self).__init__(**kwargs) + committed_blocks: Optional[List["_models.Block"]] = None, + uncommitted_blocks: Optional[List["_models.Block"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword committed_blocks: + :paramtype committed_blocks: list[~azure.storage.blob.models.Block] + :keyword uncommitted_blocks: + :paramtype uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + super().__init__(**kwargs) self.committed_blocks = committed_blocks self.uncommitted_blocks = uncommitted_blocks -class BlockLookupList(msrest.serialization.Model): +class BlockLookupList(_serialization.Model): """BlockLookupList. - :param committed: - :type committed: list[str] - :param uncommitted: - :type uncommitted: list[str] - :param latest: - :type latest: list[str] + :ivar committed: + :vartype committed: list[str] + :ivar uncommitted: + :vartype uncommitted: list[str] + :ivar latest: + :vartype latest: list[str] """ _attribute_map = { - 'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'itemsName': 'Committed'}}, - 'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'itemsName': 'Uncommitted'}}, - 'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'itemsName': 'Latest'}}, - } - _xml_map = { - 'name': 'BlockList' + "committed": {"key": "Committed", "type": "[str]", "xml": {"itemsName": "Committed"}}, + "uncommitted": {"key": "Uncommitted", "type": "[str]", "xml": {"itemsName": "Uncommitted"}}, + "latest": {"key": "Latest", "type": "[str]", "xml": {"itemsName": "Latest"}}, } + _xml_map = {"name": "BlockList"} def __init__( self, @@ -766,65 +973,71 @@ def __init__( committed: Optional[List[str]] = None, uncommitted: Optional[List[str]] = None, latest: Optional[List[str]] = None, - **kwargs - ): - super(BlockLookupList, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword committed: + :paramtype committed: list[str] + :keyword uncommitted: + :paramtype uncommitted: list[str] + :keyword latest: + :paramtype latest: list[str] + """ + super().__init__(**kwargs) self.committed = committed self.uncommitted = uncommitted self.latest = latest -class ClearRange(msrest.serialization.Model): +class ClearRange(_serialization.Model): """ClearRange. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param start: Required. - :type start: long - :param end: Required. - :type end: long + :ivar start: Required. + :vartype start: int + :ivar end: Required. 
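BlockLookupList is the Put Block List request body; each of its three lists serializes as repeated elements named by its itemsName under a <BlockList> root. A sketch with base64 block IDs (IDs illustrative):

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

    lookup = models.BlockLookupList(
        latest=["YmxvY2stMDAwMA==", "YmxvY2stMDAwMQ=="],  # base64 of b"block-0000", b"block-0001"
    )
    # Implied XML:
    # <BlockList><Latest>YmxvY2stMDAwMA==</Latest><Latest>YmxvY2stMDAwMQ==</Latest></BlockList>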
+ :vartype end: int """ _validation = { - 'start': {'required': True}, - 'end': {'required': True}, + "start": {"required": True}, + "end": {"required": True}, } _attribute_map = { - 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, - 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, - } - _xml_map = { - 'name': 'ClearRange' - } - - def __init__( - self, - *, - start: int, - end: int, - **kwargs - ): - super(ClearRange, self).__init__(**kwargs) + "start": {"key": "Start", "type": "int", "xml": {"name": "Start"}}, + "end": {"key": "End", "type": "int", "xml": {"name": "End"}}, + } + _xml_map = {"name": "ClearRange"} + + def __init__(self, *, start: int, end: int, **kwargs: Any) -> None: + """ + :keyword start: Required. + :paramtype start: int + :keyword end: Required. + :paramtype end: int + """ + super().__init__(**kwargs) self.start = start self.end = end -class ContainerCpkScopeInfo(msrest.serialization.Model): +class ContainerCpkScopeInfo(_serialization.Model): """Parameter group. - :param default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the - default encryption scope to set on the container and use for all future writes. - :type default_encryption_scope: str - :param prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, + :ivar default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the default + encryption scope to set on the container and use for all future writes. + :vartype default_encryption_scope: str + :ivar prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. - :type prevent_encryption_scope_override: bool + :vartype prevent_encryption_scope_override: bool """ _attribute_map = { - 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, - 'prevent_encryption_scope_override': {'key': 'PreventEncryptionScopeOverride', 'type': 'bool'}, + "default_encryption_scope": {"key": "DefaultEncryptionScope", "type": "str"}, + "prevent_encryption_scope_override": {"key": "PreventEncryptionScopeOverride", "type": "bool"}, } def __init__( @@ -832,57 +1045,76 @@ def __init__( *, default_encryption_scope: Optional[str] = None, prevent_encryption_scope_override: Optional[bool] = None, - **kwargs - ): - super(ContainerCpkScopeInfo, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the + default encryption scope to set on the container and use for all future writes. + :paramtype default_encryption_scope: str + :keyword prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, + prevents any request from specifying a different encryption scope than the scope set on the + container. + :paramtype prevent_encryption_scope_override: bool + """ + super().__init__(**kwargs) self.default_encryption_scope = default_encryption_scope self.prevent_encryption_scope_override = prevent_encryption_scope_override -class ContainerItem(msrest.serialization.Model): +class ContainerItem(_serialization.Model): """An Azure Storage container. - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param deleted: - :type deleted: bool - :param version: - :type version: str - :param properties: Required. Properties of a container. 
- :type properties: ~azure.storage.blob.models.ContainerProperties - :param metadata: Dictionary of :code:`<string>`. - :type metadata: dict[str, str] + All required parameters must be populated in order to send to server. + + :ivar name: Required. + :vartype name: str + :ivar deleted: + :vartype deleted: bool + :ivar version: + :vartype version: str + :ivar properties: Properties of a container. Required. + :vartype properties: ~azure.storage.blob.models.ContainerProperties + :ivar metadata: Dictionary of :code:`<string>`. + :vartype metadata: dict[str, str] """ _validation = { - 'name': {'required': True}, - 'properties': {'required': True}, + "name": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'deleted': {'key': 'Deleted', 'type': 'bool'}, - 'version': {'key': 'Version', 'type': 'str'}, - 'properties': {'key': 'Properties', 'type': 'ContainerProperties'}, - 'metadata': {'key': 'Metadata', 'type': '{str}'}, - } - _xml_map = { - 'name': 'Container' + "name": {"key": "Name", "type": "str"}, + "deleted": {"key": "Deleted", "type": "bool"}, + "version": {"key": "Version", "type": "str"}, + "properties": {"key": "Properties", "type": "ContainerProperties"}, + "metadata": {"key": "Metadata", "type": "{str}"}, } + _xml_map = {"name": "Container"} def __init__( self, *, name: str, - properties: "ContainerProperties", + properties: "_models.ContainerProperties", deleted: Optional[bool] = None, version: Optional[str] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs - ): - super(ContainerItem, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword name: Required. + :paramtype name: str + :keyword deleted: + :paramtype deleted: bool + :keyword version: + :paramtype version: str + :keyword properties: Properties of a container. Required. + :paramtype properties: ~azure.storage.blob.models.ContainerProperties + :keyword metadata: Dictionary of :code:`<string>`. + :paramtype metadata: dict[str, str] + """ + super().__init__(**kwargs) self.name = name self.deleted = deleted self.version = version @@ -890,56 +1122,63 @@ def __init__( self.metadata = metadata -class ContainerProperties(msrest.serialization.Model): +class ContainerProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes """Properties of a container. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param last_modified: Required. - :type last_modified: ~datetime.datetime - :param etag: Required. - :type etag: str - :param lease_status: Possible values include: "locked", "unlocked". - :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + :ivar last_modified: Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: Required. + :vartype etag: str + :ivar lease_status: Known values are: "locked" and "unlocked". + :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :ivar lease_state: Known values are: "available", "leased", "expired", "breaking", and "broken". - :type lease_state: str or ~azure.storage.blob.models.LeaseStateType - :param lease_duration: Possible values include: "infinite", "fixed". - :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :param public_access: Possible values include: "container", "blob". 
- :type public_access: str or ~azure.storage.blob.models.PublicAccessType - :param has_immutability_policy: - :type has_immutability_policy: bool - :param has_legal_hold: - :type has_legal_hold: bool - :param default_encryption_scope: - :type default_encryption_scope: str - :param prevent_encryption_scope_override: - :type prevent_encryption_scope_override: bool - :param deleted_time: - :type deleted_time: ~datetime.datetime - :param remaining_retention_days: - :type remaining_retention_days: int + :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :ivar lease_duration: Known values are: "infinite" and "fixed". + :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :ivar public_access: Known values are: "container" and "blob". + :vartype public_access: str or ~azure.storage.blob.models.PublicAccessType + :ivar has_immutability_policy: + :vartype has_immutability_policy: bool + :ivar has_legal_hold: + :vartype has_legal_hold: bool + :ivar default_encryption_scope: + :vartype default_encryption_scope: str + :ivar prevent_encryption_scope_override: + :vartype prevent_encryption_scope_override: bool + :ivar deleted_time: + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: + :vartype remaining_retention_days: int + :ivar is_immutable_storage_with_versioning_enabled: Indicates if version level worm is enabled + on this container. + :vartype is_immutable_storage_with_versioning_enabled: bool """ _validation = { - 'last_modified': {'required': True}, - 'etag': {'required': True}, + "last_modified": {"required": True}, + "etag": {"required": True}, } _attribute_map = { - 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, - 'etag': {'key': 'Etag', 'type': 'str'}, - 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, - 'lease_state': {'key': 'LeaseState', 'type': 'str'}, - 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, - 'public_access': {'key': 'PublicAccess', 'type': 'str'}, - 'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool'}, - 'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool'}, - 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, - 'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool'}, - 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, - 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + "last_modified": {"key": "Last-Modified", "type": "rfc-1123"}, + "etag": {"key": "Etag", "type": "str"}, + "lease_status": {"key": "LeaseStatus", "type": "str"}, + "lease_state": {"key": "LeaseState", "type": "str"}, + "lease_duration": {"key": "LeaseDuration", "type": "str"}, + "public_access": {"key": "PublicAccess", "type": "str"}, + "has_immutability_policy": {"key": "HasImmutabilityPolicy", "type": "bool"}, + "has_legal_hold": {"key": "HasLegalHold", "type": "bool"}, + "default_encryption_scope": {"key": "DefaultEncryptionScope", "type": "str"}, + "prevent_encryption_scope_override": {"key": "DenyEncryptionScopeOverride", "type": "bool"}, + "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"}, + "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"}, + "is_immutable_storage_with_versioning_enabled": { + "key": "ImmutableStorageWithVersioningEnabled", + "type": "bool", + }, } def __init__( @@ -947,19 +1186,50 @@ def __init__( *, last_modified: datetime.datetime, etag: str, - lease_status: Optional[Union[str, "LeaseStatusType"]] = None, - 
lease_state: Optional[Union[str, "LeaseStateType"]] = None, - lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, - public_access: Optional[Union[str, "PublicAccessType"]] = None, + lease_status: Optional[Union[str, "_models.LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "_models.LeaseStateType"]] = None, + lease_duration: Optional[Union[str, "_models.LeaseDurationType"]] = None, + public_access: Optional[Union[str, "_models.PublicAccessType"]] = None, has_immutability_policy: Optional[bool] = None, has_legal_hold: Optional[bool] = None, default_encryption_scope: Optional[str] = None, prevent_encryption_scope_override: Optional[bool] = None, deleted_time: Optional[datetime.datetime] = None, remaining_retention_days: Optional[int] = None, - **kwargs - ): - super(ContainerProperties, self).__init__(**kwargs) + is_immutable_storage_with_versioning_enabled: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword last_modified: Required. + :paramtype last_modified: ~datetime.datetime + :keyword etag: Required. + :paramtype etag: str + :keyword lease_status: Known values are: "locked" and "unlocked". + :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :keyword lease_state: Known values are: "available", "leased", "expired", "breaking", and + "broken". + :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :keyword lease_duration: Known values are: "infinite" and "fixed". + :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :keyword public_access: Known values are: "container" and "blob". + :paramtype public_access: str or ~azure.storage.blob.models.PublicAccessType + :keyword has_immutability_policy: + :paramtype has_immutability_policy: bool + :keyword has_legal_hold: + :paramtype has_legal_hold: bool + :keyword default_encryption_scope: + :paramtype default_encryption_scope: str + :keyword prevent_encryption_scope_override: + :paramtype prevent_encryption_scope_override: bool + :keyword deleted_time: + :paramtype deleted_time: ~datetime.datetime + :keyword remaining_retention_days: + :paramtype remaining_retention_days: int + :keyword is_immutable_storage_with_versioning_enabled: Indicates if version level worm is + enabled on this container. + :paramtype is_immutable_storage_with_versioning_enabled: bool + """ + super().__init__(**kwargs) self.last_modified = last_modified self.etag = etag self.lease_status = lease_status @@ -972,47 +1242,51 @@ def __init__( self.prevent_encryption_scope_override = prevent_encryption_scope_override self.deleted_time = deleted_time self.remaining_retention_days = remaining_retention_days - - -class CorsRule(msrest.serialization.Model): - """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain. - - All required parameters must be populated in order to send to Azure. - - :param allowed_origins: Required. The origin domains that are permitted to make a request - against the storage service via CORS. The origin domain is the domain from which the request - originates. Note that the origin must be an exact case-sensitive match with the origin that the - user age sends to the service. 
You can also use the wildcard character '*' to allow all origin - domains to make requests via CORS. - :type allowed_origins: str - :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may - use for a CORS request. (comma separated). - :type allowed_methods: str - :param allowed_headers: Required. the request headers that the origin domain may specify on the - CORS request. - :type allowed_headers: str - :param exposed_headers: Required. The response headers that may be sent in the response to the - CORS request and exposed by the browser to the request issuer. - :type exposed_headers: str - :param max_age_in_seconds: Required. The maximum amount time that a browser should cache the - preflight OPTIONS request. - :type max_age_in_seconds: int + self.is_immutable_storage_with_versioning_enabled = is_immutable_storage_with_versioning_enabled + + +class CorsRule(_serialization.Model): + """CORS is an HTTP feature that enables a web application running under one domain to access + resources in another domain. Web browsers implement a security restriction known as same-origin + policy that prevents a web page from calling APIs in a different domain; CORS provides a secure + way to allow one domain (the origin domain) to call APIs in another domain. + + All required parameters must be populated in order to send to server. + + :ivar allowed_origins: The origin domains that are permitted to make a request against the + storage service via CORS. The origin domain is the domain from which the request originates. + Note that the origin must be an exact case-sensitive match with the origin that the user agent + sends to the service. You can also use the wildcard character '*' to allow all origin domains + to make requests via CORS. Required. + :vartype allowed_origins: str + :ivar allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a + CORS request (comma separated). Required. + :vartype allowed_methods: str + :ivar allowed_headers: The request headers that the origin domain may specify on the CORS + request. Required. + :vartype allowed_headers: str + :ivar exposed_headers: The response headers that may be sent in the response to the CORS + request and exposed by the browser to the request issuer. Required. + :vartype exposed_headers: str + :ivar max_age_in_seconds: The maximum amount of time that a browser should cache the preflight + OPTIONS request. Required. 
+ :vartype max_age_in_seconds: int """ _validation = { - 'allowed_origins': {'required': True}, - 'allowed_methods': {'required': True}, - 'allowed_headers': {'required': True}, - 'exposed_headers': {'required': True}, - 'max_age_in_seconds': {'required': True, 'minimum': 0}, + "allowed_origins": {"required": True}, + "allowed_methods": {"required": True}, + "allowed_headers": {"required": True}, + "exposed_headers": {"required": True}, + "max_age_in_seconds": {"required": True, "minimum": 0}, } _attribute_map = { - 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'}, - 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'}, - 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'}, - 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'}, - 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'}, + "allowed_origins": {"key": "AllowedOrigins", "type": "str"}, + "allowed_methods": {"key": "AllowedMethods", "type": "str"}, + "allowed_headers": {"key": "AllowedHeaders", "type": "str"}, + "exposed_headers": {"key": "ExposedHeaders", "type": "str"}, + "max_age_in_seconds": {"key": "MaxAgeInSeconds", "type": "int"}, } def __init__( @@ -1023,9 +1297,29 @@ def __init__( allowed_headers: str, exposed_headers: str, max_age_in_seconds: int, - **kwargs - ): - super(CorsRule, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword allowed_origins: The origin domains that are permitted to make a request against the + storage service via CORS. The origin domain is the domain from which the request originates. + Note that the origin must be an exact case-sensitive match with the origin that the user agent + sends to the service. You can also use the wildcard character '*' to allow all origin domains + to make requests via CORS. Required. + :paramtype allowed_origins: str + :keyword allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a + CORS request (comma separated). Required. + :paramtype allowed_methods: str + :keyword allowed_headers: The request headers that the origin domain may specify on the CORS + request. Required. + :paramtype allowed_headers: str + :keyword exposed_headers: The response headers that may be sent in the response to the CORS + request and exposed by the browser to the request issuer. Required. + :paramtype exposed_headers: str + :keyword max_age_in_seconds: The maximum amount of time that a browser should cache the + preflight OPTIONS request. Required. + :paramtype max_age_in_seconds: int + """ + super().__init__(**kwargs) self.allowed_origins = allowed_origins self.allowed_methods = allowed_methods self.allowed_headers = allowed_headers @@ -1033,21 +1327,26 @@ def __init__( self.max_age_in_seconds = max_age_in_seconds -class CpkInfo(msrest.serialization.Model): +class CpkInfo(_serialization.Model): """Parameter group. - :param encryption_key: Optional. Specifies the encryption key to use to encrypt the data + :ivar encryption_key: Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. - :type encryption_key: str - :param encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided + :vartype encryption_key: str + :ivar encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. 
- :type encryption_key_sha256: str + :vartype encryption_key_sha256: str + :ivar encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, + the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is + provided. Known values are: "None" and "AES256". + :vartype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType """ _attribute_map = { - 'encryption_key': {'key': 'encryptionKey', 'type': 'str'}, - 'encryption_key_sha256': {'key': 'encryptionKeySha256', 'type': 'str'}, + "encryption_key": {"key": "encryptionKey", "type": "str"}, + "encryption_key_sha256": {"key": "encryptionKeySha256", "type": "str"}, + "encryption_algorithm": {"key": "encryptionAlgorithm", "type": "str"}, } def __init__( @@ -1055,131 +1354,102 @@ def __init__( *, encryption_key: Optional[str] = None, encryption_key_sha256: Optional[str] = None, - **kwargs - ): - super(CpkInfo, self).__init__(**kwargs) + encryption_algorithm: Optional[Union[str, "_models.EncryptionAlgorithmType"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data + provided in the request. If not specified, encryption is performed with the root account + encryption key. For more information, see Encryption at Rest for Azure Storage Services. + :paramtype encryption_key: str + :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be + provided if the x-ms-encryption-key header is provided. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. + Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key + header is provided. Known values are: "None" and "AES256". + :paramtype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType + """ + super().__init__(**kwargs) self.encryption_key = encryption_key self.encryption_key_sha256 = encryption_key_sha256 + self.encryption_algorithm = encryption_algorithm -class CpkScopeInfo(msrest.serialization.Model): +class CpkScopeInfo(_serialization.Model): """Parameter group. - :param encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + :ivar encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. - :type encryption_scope: str + :vartype encryption_scope: str """ _attribute_map = { - 'encryption_scope': {'key': 'encryptionScope', 'type': 'str'}, - } - - def __init__( - self, - *, - encryption_scope: Optional[str] = None, - **kwargs - ): - super(CpkScopeInfo, self).__init__(**kwargs) + "encryption_scope": {"key": "encryptionScope", "type": "str"}, + } + + def __init__(self, *, encryption_scope: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + encryption scope to use to encrypt the data provided in the request. If not specified, + encryption is performed with the default account encryption scope. For more information, see + Encryption at Rest for Azure Storage Services. 
+ :paramtype encryption_scope: str + """ + super().__init__(**kwargs) self.encryption_scope = encryption_scope -class DataLakeStorageError(msrest.serialization.Model): - """DataLakeStorageError. - - :param data_lake_storage_error_details: The service error response object. - :type data_lake_storage_error_details: ~azure.storage.blob.models.DataLakeStorageErrorDetails - """ - - _attribute_map = { - 'data_lake_storage_error_details': {'key': 'error', 'type': 'DataLakeStorageErrorDetails'}, - } - - def __init__( - self, - *, - data_lake_storage_error_details: Optional["DataLakeStorageErrorDetails"] = None, - **kwargs - ): - super(DataLakeStorageError, self).__init__(**kwargs) - self.data_lake_storage_error_details = data_lake_storage_error_details - - -class DataLakeStorageErrorDetails(msrest.serialization.Model): - """The service error response object. - - :param code: The service error code. - :type code: str - :param message: The service error message. - :type message: str +class DelimitedTextConfiguration(_serialization.Model): + """Groups the settings used for interpreting the blob data if the blob is delimited text + formatted. + + :ivar column_separator: The string used to separate columns. + :vartype column_separator: str + :ivar field_quote: The string used to quote a specific field. + :vartype field_quote: str + :ivar record_separator: The string used to separate records. + :vartype record_separator: str + :ivar escape_char: The string used as an escape character. + :vartype escape_char: str + :ivar headers_present: Represents whether the data has headers. + :vartype headers_present: bool """ _attribute_map = { - 'code': {'key': 'Code', 'type': 'str'}, - 'message': {'key': 'Message', 'type': 'str'}, + "column_separator": {"key": "ColumnSeparator", "type": "str", "xml": {"name": "ColumnSeparator"}}, + "field_quote": {"key": "FieldQuote", "type": "str", "xml": {"name": "FieldQuote"}}, + "record_separator": {"key": "RecordSeparator", "type": "str", "xml": {"name": "RecordSeparator"}}, + "escape_char": {"key": "EscapeChar", "type": "str", "xml": {"name": "EscapeChar"}}, + "headers_present": {"key": "HeadersPresent", "type": "bool", "xml": {"name": "HasHeaders"}}, } + _xml_map = {"name": "DelimitedTextConfiguration"} def __init__( self, *, - code: Optional[str] = None, - message: Optional[str] = None, - **kwargs - ): - super(DataLakeStorageErrorDetails, self).__init__(**kwargs) - self.code = code - self.message = message - - -class DelimitedTextConfiguration(msrest.serialization.Model): - """delimited text configuration. - - All required parameters must be populated in order to send to Azure. - - :param column_separator: Required. column separator. - :type column_separator: str - :param field_quote: Required. field quote. - :type field_quote: str - :param record_separator: Required. record separator. - :type record_separator: str - :param escape_char: Required. escape char. - :type escape_char: str - :param headers_present: Required. has headers. 
- :type headers_present: bool - """ - - _validation = { - 'column_separator': {'required': True}, - 'field_quote': {'required': True}, - 'record_separator': {'required': True}, - 'escape_char': {'required': True}, - 'headers_present': {'required': True}, - } - - _attribute_map = { - 'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}}, - 'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}}, - 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, - 'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}}, - 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}}, - } - _xml_map = { - 'name': 'DelimitedTextConfiguration' - } - - def __init__( - self, - *, - column_separator: str, - field_quote: str, - record_separator: str, - escape_char: str, - headers_present: bool, - **kwargs - ): - super(DelimitedTextConfiguration, self).__init__(**kwargs) + column_separator: Optional[str] = None, + field_quote: Optional[str] = None, + record_separator: Optional[str] = None, + escape_char: Optional[str] = None, + headers_present: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword column_separator: The string used to separate columns. + :paramtype column_separator: str + :keyword field_quote: The string used to quote a specific field. + :paramtype field_quote: str + :keyword record_separator: The string used to separate records. + :paramtype record_separator: str + :keyword escape_char: The string used as an escape character. + :paramtype escape_char: str + :keyword headers_present: Represents whether the data has headers. + :paramtype headers_present: bool + """ + super().__init__(**kwargs) self.column_separator = column_separator self.field_quote = field_quote self.record_separator = record_separator @@ -1187,309 +1457,314 @@ def __init__( self.headers_present = headers_present -class DirectoryHttpHeaders(msrest.serialization.Model): - """Parameter group. - - :param cache_control: Cache control for given resource. - :type cache_control: str - :param content_type: Content type for given resource. - :type content_type: str - :param content_encoding: Content encoding for given resource. - :type content_encoding: str - :param content_language: Content language for given resource. - :type content_language: str - :param content_disposition: Content disposition for given resource. - :type content_disposition: str - """ - - _attribute_map = { - 'cache_control': {'key': 'cacheControl', 'type': 'str'}, - 'content_type': {'key': 'contentType', 'type': 'str'}, - 'content_encoding': {'key': 'contentEncoding', 'type': 'str'}, - 'content_language': {'key': 'contentLanguage', 'type': 'str'}, - 'content_disposition': {'key': 'contentDisposition', 'type': 'str'}, - } - - def __init__( - self, - *, - cache_control: Optional[str] = None, - content_type: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_disposition: Optional[str] = None, - **kwargs - ): - super(DirectoryHttpHeaders, self).__init__(**kwargs) - self.cache_control = cache_control - self.content_type = content_type - self.content_encoding = content_encoding - self.content_language = content_language - self.content_disposition = content_disposition - - -class FilterBlobItem(msrest.serialization.Model): +class FilterBlobItem(_serialization.Model): """Blob info from a Filter Blobs API call. 
- All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param container_name: Required. - :type container_name: str - :param tags: A set of tags. Blob tags. - :type tags: ~azure.storage.blob.models.BlobTags + All required parameters must be populated in order to send to server. + + :ivar name: Required. + :vartype name: str + :ivar container_name: Required. + :vartype container_name: str + :ivar tags: Blob tags. + :vartype tags: ~azure.storage.blob.models.BlobTags + :ivar version_id: + :vartype version_id: str + :ivar is_current_version: + :vartype is_current_version: bool """ _validation = { - 'name': {'required': True}, - 'container_name': {'required': True}, + "name": {"required": True}, + "container_name": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'container_name': {'key': 'ContainerName', 'type': 'str'}, - 'tags': {'key': 'Tags', 'type': 'BlobTags'}, - } - _xml_map = { - 'name': 'Blob' + "name": {"key": "Name", "type": "str"}, + "container_name": {"key": "ContainerName", "type": "str"}, + "tags": {"key": "Tags", "type": "BlobTags"}, + "version_id": {"key": "VersionId", "type": "str"}, + "is_current_version": {"key": "IsCurrentVersion", "type": "bool"}, } + _xml_map = {"name": "Blob"} def __init__( self, *, name: str, container_name: str, - tags: Optional["BlobTags"] = None, - **kwargs - ): - super(FilterBlobItem, self).__init__(**kwargs) + tags: Optional["_models.BlobTags"] = None, + version_id: Optional[str] = None, + is_current_version: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword name: Required. + :paramtype name: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword tags: Blob tags. + :paramtype tags: ~azure.storage.blob.models.BlobTags + :keyword version_id: + :paramtype version_id: str + :keyword is_current_version: + :paramtype is_current_version: bool + """ + super().__init__(**kwargs) self.name = name self.container_name = container_name self.tags = tags + self.version_id = version_id + self.is_current_version = is_current_version -class FilterBlobSegment(msrest.serialization.Model): +class FilterBlobSegment(_serialization.Model): """The result of a Filter Blobs API call. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param service_endpoint: Required. - :type service_endpoint: str - :param where: Required. - :type where: str - :param blobs: Required. - :type blobs: list[~azure.storage.blob.models.FilterBlobItem] - :param next_marker: - :type next_marker: str + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar where: Required. + :vartype where: str + :ivar blobs: Required. 
+ :vartype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'where': {'required': True}, - 'blobs': {'required': True}, + "service_endpoint": {"required": True}, + "where": {"required": True}, + "blobs": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'where': {'key': 'Where', 'type': 'str'}, - 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'wrapped': True, 'itemsName': 'Blob'}}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "where": {"key": "Where", "type": "str"}, + "blobs": { + "key": "Blobs", + "type": "[FilterBlobItem]", + "xml": {"name": "Blobs", "wrapped": True, "itemsName": "Blob"}, + }, + "next_marker": {"key": "NextMarker", "type": "str"}, } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, where: str, - blobs: List["FilterBlobItem"], + blobs: List["_models.FilterBlobItem"], next_marker: Optional[str] = None, - **kwargs - ): - super(FilterBlobSegment, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword where: Required. + :paramtype where: str + :keyword blobs: Required. + :paramtype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.where = where self.blobs = blobs self.next_marker = next_marker -class GeoReplication(msrest.serialization.Model): +class GeoReplication(_serialization.Model): """Geo-Replication information for the Secondary Storage Service. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param status: Required. The status of the secondary location. Possible values include: "live", - "bootstrap", "unavailable". - :type status: str or ~azure.storage.blob.models.GeoReplicationStatusType - :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes - preceding this value are guaranteed to be available for read operations at the secondary. - Primary writes after this point in time may or may not be available for reads. - :type last_sync_time: ~datetime.datetime + :ivar status: The status of the secondary location. Required. Known values are: "live", + "bootstrap", and "unavailable". + :vartype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :ivar last_sync_time: A GMT date/time value, to the second. All primary writes preceding this + value are guaranteed to be available for read operations at the secondary. Primary writes after + this point in time may or may not be available for reads. Required. 
+ :vartype last_sync_time: ~datetime.datetime """ _validation = { - 'status': {'required': True}, - 'last_sync_time': {'required': True}, + "status": {"required": True}, + "last_sync_time": {"required": True}, } _attribute_map = { - 'status': {'key': 'Status', 'type': 'str'}, - 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'}, + "status": {"key": "Status", "type": "str"}, + "last_sync_time": {"key": "LastSyncTime", "type": "rfc-1123"}, } def __init__( self, *, - status: Union[str, "GeoReplicationStatusType"], + status: Union[str, "_models.GeoReplicationStatusType"], last_sync_time: datetime.datetime, - **kwargs - ): - super(GeoReplication, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword status: The status of the secondary location. Required. Known values are: "live", + "bootstrap", and "unavailable". + :paramtype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :keyword last_sync_time: A GMT date/time value, to the second. All primary writes preceding + this value are guaranteed to be available for read operations at the secondary. Primary writes + after this point in time may or may not be available for reads. Required. + :paramtype last_sync_time: ~datetime.datetime + """ + super().__init__(**kwargs) self.status = status self.last_sync_time = last_sync_time -class JsonTextConfiguration(msrest.serialization.Model): +class JsonTextConfiguration(_serialization.Model): """json text configuration. - All required parameters must be populated in order to send to Azure. - - :param record_separator: Required. record separator. - :type record_separator: str + :ivar record_separator: The string used to separate records. + :vartype record_separator: str """ - _validation = { - 'record_separator': {'required': True}, - } - _attribute_map = { - 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, - } - _xml_map = { - 'name': 'JsonTextConfiguration' + "record_separator": {"key": "RecordSeparator", "type": "str", "xml": {"name": "RecordSeparator"}}, } + _xml_map = {"name": "JsonTextConfiguration"} - def __init__( - self, - *, - record_separator: str, - **kwargs - ): - super(JsonTextConfiguration, self).__init__(**kwargs) + def __init__(self, *, record_separator: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword record_separator: The string used to separate records. + :paramtype record_separator: str + """ + super().__init__(**kwargs) self.record_separator = record_separator -class KeyInfo(msrest.serialization.Model): +class KeyInfo(_serialization.Model): """Key information. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param start: Required. The date-time the key is active in ISO 8601 UTC time. - :type start: str - :param expiry: Required. The date-time the key expires in ISO 8601 UTC time. - :type expiry: str + :ivar start: The date-time the key is active in ISO 8601 UTC time. Required. + :vartype start: str + :ivar expiry: The date-time the key expires in ISO 8601 UTC time. Required. 
+ :vartype expiry: str """ _validation = { - 'start': {'required': True}, - 'expiry': {'required': True}, + "start": {"required": True}, + "expiry": {"required": True}, } _attribute_map = { - 'start': {'key': 'Start', 'type': 'str'}, - 'expiry': {'key': 'Expiry', 'type': 'str'}, - } - - def __init__( - self, - *, - start: str, - expiry: str, - **kwargs - ): - super(KeyInfo, self).__init__(**kwargs) + "start": {"key": "Start", "type": "str"}, + "expiry": {"key": "Expiry", "type": "str"}, + } + + def __init__(self, *, start: str, expiry: str, **kwargs: Any) -> None: + """ + :keyword start: The date-time the key is active in ISO 8601 UTC time. Required. + :paramtype start: str + :keyword expiry: The date-time the key expires in ISO 8601 UTC time. Required. + :paramtype expiry: str + """ + super().__init__(**kwargs) self.start = start self.expiry = expiry -class LeaseAccessConditions(msrest.serialization.Model): +class LeaseAccessConditions(_serialization.Model): """Parameter group. - :param lease_id: If specified, the operation only succeeds if the resource's lease is active - and matches this ID. - :type lease_id: str + :ivar lease_id: If specified, the operation only succeeds if the resource's lease is active and + matches this ID. + :vartype lease_id: str """ _attribute_map = { - 'lease_id': {'key': 'leaseId', 'type': 'str'}, + "lease_id": {"key": "leaseId", "type": "str"}, } - def __init__( - self, - *, - lease_id: Optional[str] = None, - **kwargs - ): - super(LeaseAccessConditions, self).__init__(**kwargs) + def __init__(self, *, lease_id: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. + :paramtype lease_id: str + """ + super().__init__(**kwargs) self.lease_id = lease_id -class ListBlobsFlatSegmentResponse(msrest.serialization.Model): +class ListBlobsFlatSegmentResponse(_serialization.Model): """An enumeration of blobs. - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param container_name: Required. - :type container_name: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param segment: Required. - :type segment: ~azure.storage.blob.models.BlobFlatListSegment - :param next_marker: - :type next_marker: str + All required parameters must be populated in order to send to server. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar segment: Required. 
+ :vartype segment: ~azure.storage.blob.models.BlobFlatListSegment + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'container_name': {'required': True}, - 'segment': {'required': True}, + "service_endpoint": {"required": True}, + "container_name": {"required": True}, + "segment": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment'}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "container_name": {"key": "ContainerName", "type": "str", "xml": {"attr": True}}, + "prefix": {"key": "Prefix", "type": "str"}, + "marker": {"key": "Marker", "type": "str"}, + "max_results": {"key": "MaxResults", "type": "int"}, + "segment": {"key": "Segment", "type": "BlobFlatListSegment"}, + "next_marker": {"key": "NextMarker", "type": "str"}, } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, container_name: str, - segment: "BlobFlatListSegment", + segment: "_models.BlobFlatListSegment", prefix: Optional[str] = None, marker: Optional[str] = None, max_results: Optional[int] = None, next_marker: Optional[str] = None, - **kwargs - ): - super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword segment: Required. + :paramtype segment: ~azure.storage.blob.models.BlobFlatListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.container_name = container_name self.prefix = prefix @@ -1499,63 +1774,79 @@ def __init__( self.next_marker = next_marker -class ListBlobsHierarchySegmentResponse(msrest.serialization.Model): +class ListBlobsHierarchySegmentResponse(_serialization.Model): """An enumeration of blobs. - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param container_name: Required. - :type container_name: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param delimiter: - :type delimiter: str - :param segment: Required. - :type segment: ~azure.storage.blob.models.BlobHierarchyListSegment - :param next_marker: - :type next_marker: str + All required parameters must be populated in order to send to server. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar delimiter: + :vartype delimiter: str + :ivar segment: Required. 
+ :vartype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'container_name': {'required': True}, - 'segment': {'required': True}, + "service_endpoint": {"required": True}, + "container_name": {"required": True}, + "segment": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'delimiter': {'key': 'Delimiter', 'type': 'str'}, - 'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment'}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "container_name": {"key": "ContainerName", "type": "str", "xml": {"attr": True}}, + "prefix": {"key": "Prefix", "type": "str"}, + "marker": {"key": "Marker", "type": "str"}, + "max_results": {"key": "MaxResults", "type": "int"}, + "delimiter": {"key": "Delimiter", "type": "str"}, + "segment": {"key": "Segment", "type": "BlobHierarchyListSegment"}, + "next_marker": {"key": "NextMarker", "type": "str"}, } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, container_name: str, - segment: "BlobHierarchyListSegment", + segment: "_models.BlobHierarchyListSegment", prefix: Optional[str] = None, marker: Optional[str] = None, max_results: Optional[int] = None, delimiter: Optional[str] = None, next_marker: Optional[str] = None, - **kwargs - ): - super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword delimiter: + :paramtype delimiter: str + :keyword segment: Required. + :paramtype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.container_name = container_name self.prefix = prefix @@ -1566,54 +1857,70 @@ def __init__( self.next_marker = next_marker -class ListContainersSegmentResponse(msrest.serialization.Model): +class ListContainersSegmentResponse(_serialization.Model): """An enumeration of containers. - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param container_items: Required. - :type container_items: list[~azure.storage.blob.models.ContainerItem] - :param next_marker: - :type next_marker: str + All required parameters must be populated in order to send to server. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar container_items: Required. 
+ :vartype container_items: list[~azure.storage.blob.models.ContainerItem] + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'container_items': {'required': True}, + "service_endpoint": {"required": True}, + "container_items": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'wrapped': True, 'itemsName': 'Container'}}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' - } + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "prefix": {"key": "Prefix", "type": "str"}, + "marker": {"key": "Marker", "type": "str"}, + "max_results": {"key": "MaxResults", "type": "int"}, + "container_items": { + "key": "ContainerItems", + "type": "[ContainerItem]", + "xml": {"name": "Containers", "wrapped": True, "itemsName": "Container"}, + }, + "next_marker": {"key": "NextMarker", "type": "str"}, + } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, - container_items: List["ContainerItem"], + container_items: List["_models.ContainerItem"], prefix: Optional[str] = None, marker: Optional[str] = None, max_results: Optional[int] = None, next_marker: Optional[str] = None, - **kwargs - ): - super(ListContainersSegmentResponse, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword container_items: Required. + :paramtype container_items: list[~azure.storage.blob.models.ContainerItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.prefix = prefix self.marker = marker @@ -1622,38 +1929,38 @@ def __init__( self.next_marker = next_marker -class Logging(msrest.serialization.Model): +class Logging(_serialization.Model): """Azure Analytics Logging settings. - All required parameters must be populated in order to send to Azure. - - :param version: Required. The version of Storage Analytics to configure. - :type version: str - :param delete: Required. Indicates whether all delete requests should be logged. - :type delete: bool - :param read: Required. Indicates whether all read requests should be logged. - :type read: bool - :param write: Required. Indicates whether all write requests should be logged. - :type write: bool - :param retention_policy: Required. the retention policy which determines how long the - associated data should persist. - :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + All required parameters must be populated in order to send to server. + + :ivar version: The version of Storage Analytics to configure. Required. + :vartype version: str + :ivar delete: Indicates whether all delete requests should be logged. Required. + :vartype delete: bool + :ivar read: Indicates whether all read requests should be logged. Required. + :vartype read: bool + :ivar write: Indicates whether all write requests should be logged. Required. 
+ :vartype write: bool + :ivar retention_policy: the retention policy which determines how long the associated data + should persist. Required. + :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy """ _validation = { - 'version': {'required': True}, - 'delete': {'required': True}, - 'read': {'required': True}, - 'write': {'required': True}, - 'retention_policy': {'required': True}, + "version": {"required": True}, + "delete": {"required": True}, + "read": {"required": True}, + "write": {"required": True}, + "retention_policy": {"required": True}, } _attribute_map = { - 'version': {'key': 'Version', 'type': 'str'}, - 'delete': {'key': 'Delete', 'type': 'bool'}, - 'read': {'key': 'Read', 'type': 'bool'}, - 'write': {'key': 'Write', 'type': 'bool'}, - 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + "version": {"key": "Version", "type": "str"}, + "delete": {"key": "Delete", "type": "bool"}, + "read": {"key": "Read", "type": "bool"}, + "write": {"key": "Write", "type": "bool"}, + "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, } def __init__( @@ -1663,10 +1970,23 @@ def __init__( delete: bool, read: bool, write: bool, - retention_policy: "RetentionPolicy", - **kwargs - ): - super(Logging, self).__init__(**kwargs) + retention_policy: "_models.RetentionPolicy", + **kwargs: Any + ) -> None: + """ + :keyword version: The version of Storage Analytics to configure. Required. + :paramtype version: str + :keyword delete: Indicates whether all delete requests should be logged. Required. + :paramtype delete: bool + :keyword read: Indicates whether all read requests should be logged. Required. + :paramtype read: bool + :keyword write: Indicates whether all write requests should be logged. Required. + :paramtype write: bool + :keyword retention_policy: the retention policy which determines how long the associated data + should persist. Required. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super().__init__(**kwargs) self.version = version self.delete = delete self.read = read @@ -1674,32 +1994,32 @@ def __init__( self.retention_policy = retention_policy -class Metrics(msrest.serialization.Model): +class Metrics(_serialization.Model): """a summary of request statistics grouped by API in hour or minute aggregates for blobs. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param version: The version of Storage Analytics to configure. - :type version: str - :param enabled: Required. Indicates whether metrics are enabled for the Blob service. - :type enabled: bool - :param include_apis: Indicates whether metrics should generate summary statistics for called - API operations. - :type include_apis: bool - :param retention_policy: the retention policy which determines how long the associated data + :ivar version: The version of Storage Analytics to configure. + :vartype version: str + :ivar enabled: Indicates whether metrics are enabled for the Blob service. Required. + :vartype enabled: bool + :ivar include_apis: Indicates whether metrics should generate summary statistics for called API + operations. + :vartype include_apis: bool + :ivar retention_policy: the retention policy which determines how long the associated data should persist. 
- :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy """ _validation = { - 'enabled': {'required': True}, + "enabled": {"required": True}, } _attribute_map = { - 'version': {'key': 'Version', 'type': 'str'}, - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'}, - 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + "version": {"key": "Version", "type": "str"}, + "enabled": {"key": "Enabled", "type": "bool"}, + "include_apis": {"key": "IncludeAPIs", "type": "bool"}, + "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, } def __init__( @@ -1708,40 +2028,52 @@ def __init__( enabled: bool, version: Optional[str] = None, include_apis: Optional[bool] = None, - retention_policy: Optional["RetentionPolicy"] = None, - **kwargs - ): - super(Metrics, self).__init__(**kwargs) + retention_policy: Optional["_models.RetentionPolicy"] = None, + **kwargs: Any + ) -> None: + """ + :keyword version: The version of Storage Analytics to configure. + :paramtype version: str + :keyword enabled: Indicates whether metrics are enabled for the Blob service. Required. + :paramtype enabled: bool + :keyword include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :paramtype include_apis: bool + :keyword retention_policy: the retention policy which determines how long the associated data + should persist. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super().__init__(**kwargs) self.version = version self.enabled = enabled self.include_apis = include_apis self.retention_policy = retention_policy -class ModifiedAccessConditions(msrest.serialization.Model): +class ModifiedAccessConditions(_serialization.Model): """Parameter group. - :param if_modified_since: Specify this header value to operate only on a blob if it has been + :ivar if_modified_since: Specify this header value to operate only on a blob if it has been modified since the specified date/time. - :type if_modified_since: ~datetime.datetime - :param if_unmodified_since: Specify this header value to operate only on a blob if it has not + :vartype if_modified_since: ~datetime.datetime + :ivar if_unmodified_since: Specify this header value to operate only on a blob if it has not been modified since the specified date/time. - :type if_unmodified_since: ~datetime.datetime - :param if_match: Specify an ETag value to operate only on blobs with a matching value. - :type if_match: str - :param if_none_match: Specify an ETag value to operate only on blobs without a matching value. - :type if_none_match: str - :param if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a - matching value. - :type if_tags: str + :vartype if_unmodified_since: ~datetime.datetime + :ivar if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype if_match: str + :ivar if_none_match: Specify an ETag value to operate only on blobs without a matching value. + :vartype if_none_match: str + :ivar if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a matching + value. 
+ :vartype if_tags: str """ _attribute_map = { - 'if_modified_since': {'key': 'ifModifiedSince', 'type': 'rfc-1123'}, - 'if_unmodified_since': {'key': 'ifUnmodifiedSince', 'type': 'rfc-1123'}, - 'if_match': {'key': 'ifMatch', 'type': 'str'}, - 'if_none_match': {'key': 'ifNoneMatch', 'type': 'str'}, - 'if_tags': {'key': 'ifTags', 'type': 'str'}, + "if_modified_since": {"key": "ifModifiedSince", "type": "rfc-1123"}, + "if_unmodified_since": {"key": "ifUnmodifiedSince", "type": "rfc-1123"}, + "if_match": {"key": "ifMatch", "type": "str"}, + "if_none_match": {"key": "ifNoneMatch", "type": "str"}, + "if_tags": {"key": "ifTags", "type": "str"}, } def __init__( @@ -1752,9 +2084,25 @@ def __init__( if_match: Optional[str] = None, if_none_match: Optional[str] = None, if_tags: Optional[str] = None, - **kwargs - ): - super(ModifiedAccessConditions, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype if_match: str + :keyword if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :paramtype if_none_match: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :paramtype if_tags: str + """ + super().__init__(**kwargs) self.if_modified_since = if_modified_since self.if_unmodified_since = if_unmodified_since self.if_match = if_match @@ -1762,136 +2110,175 @@ def __init__( self.if_tags = if_tags -class PageList(msrest.serialization.Model): +class PageList(_serialization.Model): """the list of pages. 
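For orientation, a minimal sketch of how the precondition group above is typically assembled before being passed to a blob operation. The import path mirrors the vendored layout this patch touches, and the ETag value is hypothetical:

    import datetime
    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    # Only act on the blob if it still carries this ETag and was modified after the cutoff.
    conditions = _models.ModifiedAccessConditions(
        if_match='"0x8D4BCC2E4835CD0"',  # hypothetical ETag
        if_modified_since=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
    )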
- :param page_range: - :type page_range: list[~azure.storage.blob.models.PageRange] - :param clear_range: - :type clear_range: list[~azure.storage.blob.models.ClearRange] + :ivar page_range: + :vartype page_range: list[~azure.storage.blob.models.PageRange] + :ivar clear_range: + :vartype clear_range: list[~azure.storage.blob.models.ClearRange] + :ivar next_marker: + :vartype next_marker: str """ _attribute_map = { - 'page_range': {'key': 'PageRange', 'type': '[PageRange]'}, - 'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]'}, + "page_range": {"key": "PageRange", "type": "[PageRange]", "xml": {"itemsName": "PageRange"}}, + "clear_range": {"key": "ClearRange", "type": "[ClearRange]", "xml": {"itemsName": "ClearRange"}}, + "next_marker": {"key": "NextMarker", "type": "str"}, } def __init__( self, *, - page_range: Optional[List["PageRange"]] = None, - clear_range: Optional[List["ClearRange"]] = None, - **kwargs - ): - super(PageList, self).__init__(**kwargs) + page_range: Optional[List["_models.PageRange"]] = None, + clear_range: Optional[List["_models.ClearRange"]] = None, + next_marker: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword page_range: + :paramtype page_range: list[~azure.storage.blob.models.PageRange] + :keyword clear_range: + :paramtype clear_range: list[~azure.storage.blob.models.ClearRange] + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.page_range = page_range self.clear_range = clear_range + self.next_marker = next_marker -class PageRange(msrest.serialization.Model): +class PageRange(_serialization.Model): """PageRange. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param start: Required. - :type start: long - :param end: Required. - :type end: long + :ivar start: Required. + :vartype start: int + :ivar end: Required. + :vartype end: int """ _validation = { - 'start': {'required': True}, - 'end': {'required': True}, + "start": {"required": True}, + "end": {"required": True}, } _attribute_map = { - 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, - 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, - } - _xml_map = { - 'name': 'PageRange' - } - - def __init__( - self, - *, - start: int, - end: int, - **kwargs - ): - super(PageRange, self).__init__(**kwargs) + "start": {"key": "Start", "type": "int", "xml": {"name": "Start"}}, + "end": {"key": "End", "type": "int", "xml": {"name": "End"}}, + } + _xml_map = {"name": "PageRange"} + + def __init__(self, *, start: int, end: int, **kwargs: Any) -> None: + """ + :keyword start: Required. + :paramtype start: int + :keyword end: Required. + :paramtype end: int + """ + super().__init__(**kwargs) self.start = start self.end = end -class QueryFormat(msrest.serialization.Model): +class QueryFormat(_serialization.Model): """QueryFormat. - :param type: The quick query format type. Possible values include: "delimited", "json", - "arrow". - :type type: str or ~azure.storage.blob.models.QueryFormatType - :param delimited_text_configuration: delimited text configuration. - :type delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration - :param json_text_configuration: json text configuration. - :type json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration - :param arrow_configuration: arrow configuration. 
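A quick illustration of the page-list models defined above, including the next_marker continuation field this update adds. Values are hypothetical; the import path assumes the vendored layout:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    # One 512-byte valid page range plus a continuation marker for paged results.
    pages = _models.PageList(
        page_range=[_models.PageRange(start=0, end=511)],
        next_marker="marker-1",  # hypothetical continuation token
    )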
- :type arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + All required parameters must be populated in order to send to server. + + :ivar type: The quick query format type. Required. Known values are: "delimited", "json", + "arrow", and "parquet". + :vartype type: str or ~azure.storage.blob.models.QueryFormatType + :ivar delimited_text_configuration: Groups the settings used for interpreting the blob data if + the blob is delimited text formatted. + :vartype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :ivar json_text_configuration: json text configuration. + :vartype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :ivar arrow_configuration: Groups the settings used for formatting the response if the response + should be Arrow formatted. + :vartype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :ivar parquet_text_configuration: parquet configuration. + :vartype parquet_text_configuration: JSON """ + _validation = { + "type": {"required": True}, + } + _attribute_map = { - 'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}}, - 'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration'}, - 'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration'}, - 'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration'}, + "type": {"key": "Type", "type": "str", "xml": {"name": "Type"}}, + "delimited_text_configuration": {"key": "DelimitedTextConfiguration", "type": "DelimitedTextConfiguration"}, + "json_text_configuration": {"key": "JsonTextConfiguration", "type": "JsonTextConfiguration"}, + "arrow_configuration": {"key": "ArrowConfiguration", "type": "ArrowConfiguration"}, + "parquet_text_configuration": {"key": "ParquetTextConfiguration", "type": "object"}, } def __init__( self, *, - type: Optional[Union[str, "QueryFormatType"]] = None, - delimited_text_configuration: Optional["DelimitedTextConfiguration"] = None, - json_text_configuration: Optional["JsonTextConfiguration"] = None, - arrow_configuration: Optional["ArrowConfiguration"] = None, - **kwargs - ): - super(QueryFormat, self).__init__(**kwargs) + type: Union[str, "_models.QueryFormatType"], + delimited_text_configuration: Optional["_models.DelimitedTextConfiguration"] = None, + json_text_configuration: Optional["_models.JsonTextConfiguration"] = None, + arrow_configuration: Optional["_models.ArrowConfiguration"] = None, + parquet_text_configuration: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword type: The quick query format type. Required. Known values are: "delimited", "json", + "arrow", and "parquet". + :paramtype type: str or ~azure.storage.blob.models.QueryFormatType + :keyword delimited_text_configuration: Groups the settings used for interpreting the blob data + if the blob is delimited text formatted. + :paramtype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :keyword json_text_configuration: json text configuration. + :paramtype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :keyword arrow_configuration: Groups the settings used for formatting the response if the + response should be Arrow formatted. + :paramtype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :keyword parquet_text_configuration: parquet configuration. 
+ :paramtype parquet_text_configuration: JSON + """ + super().__init__(**kwargs) self.type = type self.delimited_text_configuration = delimited_text_configuration self.json_text_configuration = json_text_configuration self.arrow_configuration = arrow_configuration + self.parquet_text_configuration = parquet_text_configuration -class QueryRequest(msrest.serialization.Model): - """the quick query body. +class QueryRequest(_serialization.Model): + """Groups the set of query request settings. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :ivar query_type: Required. the query type. Default value: "SQL". + :ivar query_type: Required. The type of the provided query expression. Required. Default value + is "SQL". :vartype query_type: str - :param expression: Required. a query statement. - :type expression: str - :param input_serialization: - :type input_serialization: ~azure.storage.blob.models.QuerySerialization - :param output_serialization: - :type output_serialization: ~azure.storage.blob.models.QuerySerialization + :ivar expression: The query expression in SQL. The maximum size of the query expression is + 256KiB. Required. + :vartype expression: str + :ivar input_serialization: + :vartype input_serialization: ~azure.storage.blob.models.QuerySerialization + :ivar output_serialization: + :vartype output_serialization: ~azure.storage.blob.models.QuerySerialization """ _validation = { - 'query_type': {'required': True, 'constant': True}, - 'expression': {'required': True}, + "query_type": {"required": True, "constant": True}, + "expression": {"required": True}, } _attribute_map = { - 'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}}, - 'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}}, - 'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization'}, - 'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization'}, - } - _xml_map = { - 'name': 'QueryRequest' + "query_type": {"key": "QueryType", "type": "str", "xml": {"name": "QueryType"}}, + "expression": {"key": "Expression", "type": "str", "xml": {"name": "Expression"}}, + "input_serialization": {"key": "InputSerialization", "type": "QuerySerialization"}, + "output_serialization": {"key": "OutputSerialization", "type": "QuerySerialization"}, } + _xml_map = {"name": "QueryRequest"} query_type = "SQL" @@ -1899,102 +2286,116 @@ def __init__( self, *, expression: str, - input_serialization: Optional["QuerySerialization"] = None, - output_serialization: Optional["QuerySerialization"] = None, - **kwargs - ): - super(QueryRequest, self).__init__(**kwargs) + input_serialization: Optional["_models.QuerySerialization"] = None, + output_serialization: Optional["_models.QuerySerialization"] = None, + **kwargs: Any + ) -> None: + """ + :keyword expression: The query expression in SQL. The maximum size of the query expression is + 256KiB. Required. 
+ :paramtype expression: str + :keyword input_serialization: + :paramtype input_serialization: ~azure.storage.blob.models.QuerySerialization + :keyword output_serialization: + :paramtype output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + super().__init__(**kwargs) self.expression = expression self.input_serialization = input_serialization self.output_serialization = output_serialization -class QuerySerialization(msrest.serialization.Model): +class QuerySerialization(_serialization.Model): """QuerySerialization. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param format: Required. - :type format: ~azure.storage.blob.models.QueryFormat + :ivar format: Required. + :vartype format: ~azure.storage.blob.models.QueryFormat """ _validation = { - 'format': {'required': True}, + "format": {"required": True}, } _attribute_map = { - 'format': {'key': 'Format', 'type': 'QueryFormat'}, + "format": {"key": "Format", "type": "QueryFormat"}, } - def __init__( - self, - *, - format: "QueryFormat", - **kwargs - ): - super(QuerySerialization, self).__init__(**kwargs) + def __init__(self, *, format: "_models.QueryFormat", **kwargs: Any) -> None: + """ + :keyword format: Required. + :paramtype format: ~azure.storage.blob.models.QueryFormat + """ + super().__init__(**kwargs) self.format = format -class RetentionPolicy(msrest.serialization.Model): +class RetentionPolicy(_serialization.Model): """the retention policy which determines how long the associated data should persist. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param enabled: Required. Indicates whether a retention policy is enabled for the storage - service. - :type enabled: bool - :param days: Indicates the number of days that metrics or logging or soft-deleted data should - be retained. All data older than this value will be deleted. - :type days: int - :param allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + :ivar enabled: Indicates whether a retention policy is enabled for the storage service. + Required. + :vartype enabled: bool + :ivar days: Indicates the number of days that metrics or logging or soft-deleted data should be + retained. All data older than this value will be deleted. + :vartype days: int + :ivar allow_permanent_delete: Indicates whether permanent delete is allowed on this storage account. - :type allow_permanent_delete: bool + :vartype allow_permanent_delete: bool """ _validation = { - 'enabled': {'required': True}, - 'days': {'minimum': 1}, + "enabled": {"required": True}, + "days": {"minimum": 1}, } _attribute_map = { - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'days': {'key': 'Days', 'type': 'int'}, - 'allow_permanent_delete': {'key': 'AllowPermanentDelete', 'type': 'bool'}, + "enabled": {"key": "Enabled", "type": "bool"}, + "days": {"key": "Days", "type": "int"}, + "allow_permanent_delete": {"key": "AllowPermanentDelete", "type": "bool"}, } def __init__( - self, - *, - enabled: bool, - days: Optional[int] = None, - allow_permanent_delete: Optional[bool] = None, - **kwargs - ): - super(RetentionPolicy, self).__init__(**kwargs) + self, *, enabled: bool, days: Optional[int] = None, allow_permanent_delete: Optional[bool] = None, **kwargs: Any + ) -> None: + """ + :keyword enabled: Indicates whether a retention policy is enabled for the storage service. + Required. 
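To make the quick-query models concrete, here is a minimal, hypothetical request built from the constructors above. Note that query_type is a class-level constant fixed to "SQL", so only the expression and the optional serializations are supplied:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    request = _models.QueryRequest(
        expression="SELECT * FROM BlobStorage",  # must stay under the 256KiB limit noted above
        output_serialization=_models.QuerySerialization(
            format=_models.QueryFormat(type="delimited")  # type is now a required argument
        ),
    )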
+ :paramtype enabled: bool + :keyword days: Indicates the number of days that metrics or logging or soft-deleted data should + be retained. All data older than this value will be deleted. + :paramtype days: int + :keyword allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + account. + :paramtype allow_permanent_delete: bool + """ + super().__init__(**kwargs) self.enabled = enabled self.days = days self.allow_permanent_delete = allow_permanent_delete -class SequenceNumberAccessConditions(msrest.serialization.Model): +class SequenceNumberAccessConditions(_serialization.Model): """Parameter group. - :param if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a + :ivar if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. - :type if_sequence_number_less_than_or_equal_to: long - :param if_sequence_number_less_than: Specify this header value to operate only on a blob if it + :vartype if_sequence_number_less_than_or_equal_to: int + :ivar if_sequence_number_less_than: Specify this header value to operate only on a blob if it has a sequence number less than the specified. - :type if_sequence_number_less_than: long - :param if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + :vartype if_sequence_number_less_than: int + :ivar if_sequence_number_equal_to: Specify this header value to operate only on a blob if it has the specified sequence number. - :type if_sequence_number_equal_to: long + :vartype if_sequence_number_equal_to: int """ _attribute_map = { - 'if_sequence_number_less_than_or_equal_to': {'key': 'ifSequenceNumberLessThanOrEqualTo', 'type': 'long'}, - 'if_sequence_number_less_than': {'key': 'ifSequenceNumberLessThan', 'type': 'long'}, - 'if_sequence_number_equal_to': {'key': 'ifSequenceNumberEqualTo', 'type': 'long'}, + "if_sequence_number_less_than_or_equal_to": {"key": "ifSequenceNumberLessThanOrEqualTo", "type": "int"}, + "if_sequence_number_less_than": {"key": "ifSequenceNumberLessThan", "type": "int"}, + "if_sequence_number_equal_to": {"key": "ifSequenceNumberEqualTo", "type": "int"}, } def __init__( @@ -2003,74 +2404,89 @@ def __init__( if_sequence_number_less_than_or_equal_to: Optional[int] = None, if_sequence_number_less_than: Optional[int] = None, if_sequence_number_equal_to: Optional[int] = None, - **kwargs - ): - super(SequenceNumberAccessConditions, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. + :paramtype if_sequence_number_less_than_or_equal_to: int + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. + :paramtype if_sequence_number_less_than: int + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. + :paramtype if_sequence_number_equal_to: int + """ + super().__init__(**kwargs) self.if_sequence_number_less_than_or_equal_to = if_sequence_number_less_than_or_equal_to self.if_sequence_number_less_than = if_sequence_number_less_than self.if_sequence_number_equal_to = if_sequence_number_equal_to -class SignedIdentifier(msrest.serialization.Model): +class SignedIdentifier(_serialization.Model): """signed identifier. 
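A one-line example of the retention model above; the _validation map declares a minimum of 1 for days, so an enabled policy always carries an explicit window:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    retention = _models.RetentionPolicy(enabled=True, days=7)  # keep data for 7 days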
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param id: Required. a unique id. - :type id: str - :param access_policy: An Access policy. - :type access_policy: ~azure.storage.blob.models.AccessPolicy + :ivar id: a unique id. Required. + :vartype id: str + :ivar access_policy: An Access policy. + :vartype access_policy: ~azure.storage.blob.models.AccessPolicy """ _validation = { - 'id': {'required': True}, + "id": {"required": True}, } _attribute_map = { - 'id': {'key': 'Id', 'type': 'str'}, - 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, - } - _xml_map = { - 'name': 'SignedIdentifier' + "id": {"key": "Id", "type": "str"}, + "access_policy": {"key": "AccessPolicy", "type": "AccessPolicy"}, } + _xml_map = {"name": "SignedIdentifier"} def __init__( self, *, - id: str, - access_policy: Optional["AccessPolicy"] = None, - **kwargs - ): - super(SignedIdentifier, self).__init__(**kwargs) + id: str, # pylint: disable=redefined-builtin + access_policy: Optional["_models.AccessPolicy"] = None, + **kwargs: Any + ) -> None: + """ + :keyword id: a unique id. Required. + :paramtype id: str + :keyword access_policy: An Access policy. + :paramtype access_policy: ~azure.storage.blob.models.AccessPolicy + """ + super().__init__(**kwargs) self.id = id self.access_policy = access_policy -class SourceModifiedAccessConditions(msrest.serialization.Model): +class SourceModifiedAccessConditions(_serialization.Model): """Parameter group. - :param source_if_modified_since: Specify this header value to operate only on a blob if it has + :ivar source_if_modified_since: Specify this header value to operate only on a blob if it has been modified since the specified date/time. - :type source_if_modified_since: ~datetime.datetime - :param source_if_unmodified_since: Specify this header value to operate only on a blob if it - has not been modified since the specified date/time. - :type source_if_unmodified_since: ~datetime.datetime - :param source_if_match: Specify an ETag value to operate only on blobs with a matching value. - :type source_if_match: str - :param source_if_none_match: Specify an ETag value to operate only on blobs without a matching + :vartype source_if_modified_since: ~datetime.datetime + :ivar source_if_unmodified_since: Specify this header value to operate only on a blob if it has + not been modified since the specified date/time. + :vartype source_if_unmodified_since: ~datetime.datetime + :ivar source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype source_if_match: str + :ivar source_if_none_match: Specify an ETag value to operate only on blobs without a matching value. - :type source_if_none_match: str - :param source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + :vartype source_if_none_match: str + :ivar source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a matching value. 
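For reference, a sketch of a container ACL entry built from the model above; id is required and names the stored access policy, while the AccessPolicy attachment is optional and shown here only as a hedged suggestion:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    identifier = _models.SignedIdentifier(id="read-only-policy")
    # Optionally attach an AccessPolicy; field names assumed from the generated model:
    # identifier.access_policy = _models.AccessPolicy(permission="r")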
- :type source_if_tags: str + :vartype source_if_tags: str """ _attribute_map = { - 'source_if_modified_since': {'key': 'sourceIfModifiedSince', 'type': 'rfc-1123'}, - 'source_if_unmodified_since': {'key': 'sourceIfUnmodifiedSince', 'type': 'rfc-1123'}, - 'source_if_match': {'key': 'sourceIfMatch', 'type': 'str'}, - 'source_if_none_match': {'key': 'sourceIfNoneMatch', 'type': 'str'}, - 'source_if_tags': {'key': 'sourceIfTags', 'type': 'str'}, + "source_if_modified_since": {"key": "sourceIfModifiedSince", "type": "rfc-1123"}, + "source_if_unmodified_since": {"key": "sourceIfUnmodifiedSince", "type": "rfc-1123"}, + "source_if_match": {"key": "sourceIfMatch", "type": "str"}, + "source_if_none_match": {"key": "sourceIfNoneMatch", "type": "str"}, + "source_if_tags": {"key": "sourceIfTags", "type": "str"}, } def __init__( @@ -2081,9 +2497,25 @@ def __init__( source_if_match: Optional[str] = None, source_if_none_match: Optional[str] = None, source_if_tags: Optional[str] = None, - **kwargs - ): - super(SourceModifiedAccessConditions, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword source_if_modified_since: Specify this header value to operate only on a blob if it + has been modified since the specified date/time. + :paramtype source_if_modified_since: ~datetime.datetime + :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it + has not been modified since the specified date/time. + :paramtype source_if_unmodified_since: ~datetime.datetime + :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype source_if_match: str + :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a + matching value. + :paramtype source_if_none_match: str + :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with + a matching value. + :paramtype source_if_tags: str + """ + super().__init__(**kwargs) self.source_if_modified_since = source_if_modified_since self.source_if_unmodified_since = source_if_unmodified_since self.source_if_match = source_if_match @@ -2091,30 +2523,30 @@ def __init__( self.source_if_tags = source_if_tags -class StaticWebsite(msrest.serialization.Model): +class StaticWebsite(_serialization.Model): """The properties that enable an account to host a static website. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param enabled: Required. Indicates whether this account is hosting a static website. - :type enabled: bool - :param index_document: The default name of the index page under each directory. - :type index_document: str - :param error_document404_path: The absolute path of the custom 404 page. - :type error_document404_path: str - :param default_index_document_path: Absolute path of the default index page. - :type default_index_document_path: str + :ivar enabled: Indicates whether this account is hosting a static website. Required. + :vartype enabled: bool + :ivar index_document: The default name of the index page under each directory. + :vartype index_document: str + :ivar error_document404_path: The absolute path of the custom 404 page. + :vartype error_document404_path: str + :ivar default_index_document_path: Absolute path of the default index page. 
+ :vartype default_index_document_path: str """ _validation = { - 'enabled': {'required': True}, + "enabled": {"required": True}, } _attribute_map = { - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'index_document': {'key': 'IndexDocument', 'type': 'str'}, - 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str'}, - 'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str'}, + "enabled": {"key": "Enabled", "type": "bool"}, + "index_document": {"key": "IndexDocument", "type": "str"}, + "error_document404_path": {"key": "ErrorDocument404Path", "type": "str"}, + "default_index_document_path": {"key": "DefaultIndexDocumentPath", "type": "str"}, } def __init__( @@ -2124,83 +2556,113 @@ def __init__( index_document: Optional[str] = None, error_document404_path: Optional[str] = None, default_index_document_path: Optional[str] = None, - **kwargs - ): - super(StaticWebsite, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword enabled: Indicates whether this account is hosting a static website. Required. + :paramtype enabled: bool + :keyword index_document: The default name of the index page under each directory. + :paramtype index_document: str + :keyword error_document404_path: The absolute path of the custom 404 page. + :paramtype error_document404_path: str + :keyword default_index_document_path: Absolute path of the default index page. + :paramtype default_index_document_path: str + """ + super().__init__(**kwargs) self.enabled = enabled self.index_document = index_document self.error_document404_path = error_document404_path self.default_index_document_path = default_index_document_path -class StorageError(msrest.serialization.Model): +class StorageError(_serialization.Model): """StorageError. - :param message: - :type message: str + :ivar message: + :vartype message: str """ _attribute_map = { - 'message': {'key': 'Message', 'type': 'str'}, + "message": {"key": "Message", "type": "str"}, } - def __init__( - self, - *, - message: Optional[str] = None, - **kwargs - ): - super(StorageError, self).__init__(**kwargs) + def __init__(self, *, message: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword message: + :paramtype message: str + """ + super().__init__(**kwargs) self.message = message -class StorageServiceProperties(msrest.serialization.Model): +class StorageServiceProperties(_serialization.Model): """Storage Service Properties. - :param logging: Azure Analytics Logging settings. - :type logging: ~azure.storage.blob.models.Logging - :param hour_metrics: a summary of request statistics grouped by API in hour or minute - aggregates for blobs. - :type hour_metrics: ~azure.storage.blob.models.Metrics - :param minute_metrics: a summary of request statistics grouped by API in hour or minute + :ivar logging: Azure Analytics Logging settings. + :vartype logging: ~azure.storage.blob.models.Logging + :ivar hour_metrics: a summary of request statistics grouped by API in hour or minute aggregates + for blobs. + :vartype hour_metrics: ~azure.storage.blob.models.Metrics + :ivar minute_metrics: a summary of request statistics grouped by API in hour or minute aggregates for blobs. - :type minute_metrics: ~azure.storage.blob.models.Metrics - :param cors: The set of CORS rules. - :type cors: list[~azure.storage.blob.models.CorsRule] - :param default_service_version: The default version to use for requests to the Blob service if + :vartype minute_metrics: ~azure.storage.blob.models.Metrics + :ivar cors: The set of CORS rules. 
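Illustrating the static-website model above with typical, hypothetical values; only enabled is required:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    website = _models.StaticWebsite(
        enabled=True,
        index_document="index.html",          # served at each directory root
        error_document404_path="404.html",    # custom not-found page
    )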
+ :vartype cors: list[~azure.storage.blob.models.CorsRule] + :ivar default_service_version: The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions. - :type default_service_version: str - :param delete_retention_policy: the retention policy which determines how long the associated + :vartype default_service_version: str + :ivar delete_retention_policy: the retention policy which determines how long the associated data should persist. - :type delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy - :param static_website: The properties that enable an account to host a static website. - :type static_website: ~azure.storage.blob.models.StaticWebsite + :vartype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :ivar static_website: The properties that enable an account to host a static website. + :vartype static_website: ~azure.storage.blob.models.StaticWebsite """ _attribute_map = { - 'logging': {'key': 'Logging', 'type': 'Logging'}, - 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, - 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, - 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}}, - 'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str'}, - 'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy'}, - 'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite'}, + "logging": {"key": "Logging", "type": "Logging"}, + "hour_metrics": {"key": "HourMetrics", "type": "Metrics"}, + "minute_metrics": {"key": "MinuteMetrics", "type": "Metrics"}, + "cors": {"key": "Cors", "type": "[CorsRule]", "xml": {"wrapped": True}}, + "default_service_version": {"key": "DefaultServiceVersion", "type": "str"}, + "delete_retention_policy": {"key": "DeleteRetentionPolicy", "type": "RetentionPolicy"}, + "static_website": {"key": "StaticWebsite", "type": "StaticWebsite"}, } def __init__( self, *, - logging: Optional["Logging"] = None, - hour_metrics: Optional["Metrics"] = None, - minute_metrics: Optional["Metrics"] = None, - cors: Optional[List["CorsRule"]] = None, + logging: Optional["_models.Logging"] = None, + hour_metrics: Optional["_models.Metrics"] = None, + minute_metrics: Optional["_models.Metrics"] = None, + cors: Optional[List["_models.CorsRule"]] = None, default_service_version: Optional[str] = None, - delete_retention_policy: Optional["RetentionPolicy"] = None, - static_website: Optional["StaticWebsite"] = None, - **kwargs - ): - super(StorageServiceProperties, self).__init__(**kwargs) + delete_retention_policy: Optional["_models.RetentionPolicy"] = None, + static_website: Optional["_models.StaticWebsite"] = None, + **kwargs: Any + ) -> None: + """ + :keyword logging: Azure Analytics Logging settings. + :paramtype logging: ~azure.storage.blob.models.Logging + :keyword hour_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype hour_metrics: ~azure.storage.blob.models.Metrics + :keyword minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype minute_metrics: ~azure.storage.blob.models.Metrics + :keyword cors: The set of CORS rules. 
+ :paramtype cors: list[~azure.storage.blob.models.CorsRule] + :keyword default_service_version: The default version to use for requests to the Blob service + if an incoming request's version is not specified. Possible values include version 2008-10-27 + and all more recent versions. + :paramtype default_service_version: str + :keyword delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :paramtype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :keyword static_website: The properties that enable an account to host a static website. + :paramtype static_website: ~azure.storage.blob.models.StaticWebsite + """ + super().__init__(**kwargs) self.logging = logging self.hour_metrics = hour_metrics self.minute_metrics = minute_metrics @@ -2210,67 +2672,65 @@ def __init__( self.static_website = static_website -class StorageServiceStats(msrest.serialization.Model): +class StorageServiceStats(_serialization.Model): """Stats for the storage service. - :param geo_replication: Geo-Replication information for the Secondary Storage Service. - :type geo_replication: ~azure.storage.blob.models.GeoReplication + :ivar geo_replication: Geo-Replication information for the Secondary Storage Service. + :vartype geo_replication: ~azure.storage.blob.models.GeoReplication """ _attribute_map = { - 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + "geo_replication": {"key": "GeoReplication", "type": "GeoReplication"}, } - def __init__( - self, - *, - geo_replication: Optional["GeoReplication"] = None, - **kwargs - ): - super(StorageServiceStats, self).__init__(**kwargs) + def __init__(self, *, geo_replication: Optional["_models.GeoReplication"] = None, **kwargs: Any) -> None: + """ + :keyword geo_replication: Geo-Replication information for the Secondary Storage Service. + :paramtype geo_replication: ~azure.storage.blob.models.GeoReplication + """ + super().__init__(**kwargs) self.geo_replication = geo_replication -class UserDelegationKey(msrest.serialization.Model): +class UserDelegationKey(_serialization.Model): """A user delegation key. - All required parameters must be populated in order to send to Azure. - - :param signed_oid: Required. The Azure Active Directory object ID in GUID format. - :type signed_oid: str - :param signed_tid: Required. The Azure Active Directory tenant ID in GUID format. - :type signed_tid: str - :param signed_start: Required. The date-time the key is active. - :type signed_start: ~datetime.datetime - :param signed_expiry: Required. The date-time the key expires. - :type signed_expiry: ~datetime.datetime - :param signed_service: Required. Abbreviation of the Azure Storage service that accepts the - key. - :type signed_service: str - :param signed_version: Required. The service version that created the key. - :type signed_version: str - :param value: Required. The key as a base64 string. - :type value: str + All required parameters must be populated in order to send to server. + + :ivar signed_oid: The Azure Active Directory object ID in GUID format. Required. + :vartype signed_oid: str + :ivar signed_tid: The Azure Active Directory tenant ID in GUID format. Required. + :vartype signed_tid: str + :ivar signed_start: The date-time the key is active. Required. + :vartype signed_start: ~datetime.datetime + :ivar signed_expiry: The date-time the key expires. Required. 
+ :vartype signed_expiry: ~datetime.datetime + :ivar signed_service: Abbreviation of the Azure Storage service that accepts the key. Required. + :vartype signed_service: str + :ivar signed_version: The service version that created the key. Required. + :vartype signed_version: str + :ivar value: The key as a base64 string. Required. + :vartype value: str """ _validation = { - 'signed_oid': {'required': True}, - 'signed_tid': {'required': True}, - 'signed_start': {'required': True}, - 'signed_expiry': {'required': True}, - 'signed_service': {'required': True}, - 'signed_version': {'required': True}, - 'value': {'required': True}, + "signed_oid": {"required": True}, + "signed_tid": {"required": True}, + "signed_start": {"required": True}, + "signed_expiry": {"required": True}, + "signed_service": {"required": True}, + "signed_version": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'signed_oid': {'key': 'SignedOid', 'type': 'str'}, - 'signed_tid': {'key': 'SignedTid', 'type': 'str'}, - 'signed_start': {'key': 'SignedStart', 'type': 'iso-8601'}, - 'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601'}, - 'signed_service': {'key': 'SignedService', 'type': 'str'}, - 'signed_version': {'key': 'SignedVersion', 'type': 'str'}, - 'value': {'key': 'Value', 'type': 'str'}, + "signed_oid": {"key": "SignedOid", "type": "str"}, + "signed_tid": {"key": "SignedTid", "type": "str"}, + "signed_start": {"key": "SignedStart", "type": "iso-8601"}, + "signed_expiry": {"key": "SignedExpiry", "type": "iso-8601"}, + "signed_service": {"key": "SignedService", "type": "str"}, + "signed_version": {"key": "SignedVersion", "type": "str"}, + "value": {"key": "Value", "type": "str"}, } def __init__( @@ -2283,9 +2743,26 @@ def __init__( signed_service: str, signed_version: str, value: str, - **kwargs - ): - super(UserDelegationKey, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword signed_oid: The Azure Active Directory object ID in GUID format. Required. + :paramtype signed_oid: str + :keyword signed_tid: The Azure Active Directory tenant ID in GUID format. Required. + :paramtype signed_tid: str + :keyword signed_start: The date-time the key is active. Required. + :paramtype signed_start: ~datetime.datetime + :keyword signed_expiry: The date-time the key expires. Required. + :paramtype signed_expiry: ~datetime.datetime + :keyword signed_service: Abbreviation of the Azure Storage service that accepts the key. + Required. + :paramtype signed_service: str + :keyword signed_version: The service version that created the key. Required. + :paramtype signed_version: str + :keyword value: The key as a base64 string. Required. + :paramtype value: str + """ + super().__init__(**kwargs) self.signed_oid = signed_oid self.signed_tid = signed_tid self.signed_start = signed_start diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_patch.py new file mode 100644 index 000000000000..71dde502c70f --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/models/_patch.py @@ -0,0 +1,26 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
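Pulling the service-properties models together, a hedged end-to-end sketch of a StorageServiceProperties payload exactly as the constructors in this file define it; all values are illustrative:

    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

    retention = _models.RetentionPolicy(enabled=True, days=7)
    props = _models.StorageServiceProperties(
        logging=_models.Logging(
            version="1.0", delete=True, read=True, write=True,
            retention_policy=retention,
        ),
        hour_metrics=_models.Metrics(enabled=True, include_apis=True, retention_policy=retention),
        delete_retention_policy=retention,
        static_website=_models.StaticWebsite(enabled=False),
    )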
+# ------------------------------------ + + +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + + from typing import List +__all__ = [] # type: List[str] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/__init__.py index 62f85c9290c1..1be05c7aa9a7 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/__init__.py @@ -8,18 +8,22 @@ from ._service_operations import ServiceOperations from ._container_operations import ContainerOperations -from ._directory_operations import DirectoryOperations from ._blob_operations import BlobOperations from ._page_blob_operations import PageBlobOperations from ._append_blob_operations import AppendBlobOperations from ._block_blob_operations import BlockBlobOperations +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'ServiceOperations', - 'ContainerOperations', - 'DirectoryOperations', - 'BlobOperations', - 'PageBlobOperations', - 'AppendBlobOperations', - 'BlockBlobOperations', + "ServiceOperations", + "ContainerOperations", + "BlobOperations", + "PageBlobOperations", + "AppendBlobOperations", + "BlockBlobOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_append_blob_operations.py index abbe40eac438..d07c5c181bf3 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_append_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_append_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,67 +7,462 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
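To show what the new _patch.py hook enables, a hypothetical customization in the style of the quickstart the file links to; nothing below exists in the patch itself, and LabeledPageRange is an invented example name:

    # In _generated/models/_patch.py (illustrative only):
    from typing import List
    from ._models_py3 import PageRange  # relative import inside the generated models package

    class LabeledPageRange(PageRange):
        # Hypothetical subclass adding a client-side label to a page range.
        def __init__(self, *, label: str = "", **kwargs) -> None:
            super().__init__(**kwargs)
            self.label = label

    __all__: List[str] = ["LabeledPageRange"]  # re-exported via _patch_all by the generated __init__

    def patch_sdk():
        pass  # keep the hook defined; the generated __init__ calls it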
# -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. import models as _models +from .._serialization import Serializer -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False -class AppendBlobOperations(object): - """AppendBlobOperations operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +def build_create_request( + url: str, + *, + content_length: int, + timeout: Optional[int] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + 
_headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_append_block_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + lease_id: Optional[str] = None, + max_size: Optional[int] = None, + append_position: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if max_size is not None: + _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int") + if append_position is not None: + _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, 
"str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_append_block_from_url_request( + url: str, + *, + source_url: str, + content_length: int, + source_range: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + max_size: Optional[int] = None, + append_position: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") + if source_range is not None: + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if source_contentcrc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( + "source_contentcrc64", source_contentcrc64, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if max_size is not None: + _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int") + if append_position is not None: + _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if copy_source_authorization is not None: + 
_headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_seal_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + append_position: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if append_position is not None: + _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class AppendBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`append_blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def create( + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Create Append Blob operation creates a new append blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -74,38 +470,51 @@ def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. 
+ :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -115,6 +524,7 @@ def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -122,363 +532,371 @@ def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "AppendBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + 
encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", 
response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def append_block( + @distributed_trace + def append_block( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - body, # type: IO - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - transactional_content_crc64=None, # type: Optional[bytearray] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. 
- :type transactional_content_crc64: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param cpk_info: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _max_size = None _append_position = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "appendblock" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.append_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = 
self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_append_block_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + 
structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + 
response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - append_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def append_block_from_url( + @distributed_trace + def append_block_from_url( # pylint: disable=inconsistent-return-statements self, - source_url, # type: str - content_length, # type: int - source_range=None, # type: Optional[str] - source_content_md5=None, # type: Optional[bytearray] - source_contentcrc64=None, # type: Optional[bytearray] - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + source_url: str, + content_length: int, + source_range: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob where the contents are read from a source url. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param source_url: Specify a URL to the copy source. + :param source_url: Specify a URL to the copy source. Required. :type source_url: str - :param content_length: The length of the request. - :type content_length: long - :param source_range: Bytes of source data in the specified range. + :param content_length: The length of the request. Required. + :type content_length: int + :param source_range: Bytes of source data in the specified range. Default value is None. :type source_range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. 
Default value is None. + :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + by the service. Default value is None. + :type transactional_content_md5: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param modified_access_conditions: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _max_size = None @@ -492,226 +910,207 @@ def append_block_from_url( _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None - if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size - _append_position = append_position_access_conditions.append_position if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "appendblock" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_append_block_from_url_request( + url=self._config.url, + source_url=source_url, + content_length=content_length, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + 
encryption_scope=_encryption_scope, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.append_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is 
not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - append_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def seal( + @distributed_trace + def seal( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version 2019-12-12 version or later. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _append_position = None - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match - comp = "seal" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + + _request = build_seal_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + append_position=_append_position, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.seal.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - 
- # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) if cls: - return cls(pipeline_response, None, response_headers) - - seal.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git 
a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_blob_operations.py index 730a5648e0f7..5da0b2b033a9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,518 +7,2110 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, Iterator, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_download_request( + url: str, + *, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + range_get_content_md5: Optional[bool] = None, + range_get_content_crc64: Optional[bool] = None, + structured_body_type: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if range_get_content_md5 is not None: + _headers["x-ms-range-get-content-md5"] = _SERIALIZER.header( + "range_get_content_md5", range_get_content_md5, "bool" + ) + if range_get_content_crc64 is not None: + _headers["x-ms-range-get-content-crc64"] = _SERIALIZER.header( + "range_get_content_crc64", range_get_content_crc64, "bool" + ) + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 
"rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_properties_request( + url: str, + *, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + 
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + url: str, + *, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_delete_type: Literal["Permanent"] = "Permanent", + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if blob_delete_type is not None: + _params["deletetype"] = _SERIALIZER.query("blob_delete_type", blob_delete_type, "str") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if delete_snapshots is not None: + _headers["x-ms-delete-snapshots"] = _SERIALIZER.header("delete_snapshots", delete_snapshots, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_undelete_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + version: Literal["2025-01-05"] = 
kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_expiry_request( + url: str, + *, + expiry_options: Union[str, _models.BlobExpiryOptions], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + expires_on: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") + if expires_on is not None: + _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_http_headers_request( + url: str, + *, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: 
Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_immutability_policy_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + 
path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_immutability_policy_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_legal_hold_request( + url: str, + *, + legal_hold: bool, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + 
comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_metadata_request( + url: str, + *, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + 
_headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_acquire_lease_request( + url: str, + *, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if duration is not None: + _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + 
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_release_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_renew_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = 
_headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_change_lease_request( + url: str, + *, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + 
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_break_lease_request( + url: str, + *, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if break_period is not None: + _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_snapshot_request( + url: str, + *, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + 
if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_start_copy_from_url_request( + url: str, + *, + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + 
if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + seal_blob: Optional[bool] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if rehydrate_priority is not None: + _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if source_if_tags is not None: + _headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if seal_blob is not None: + 
_headers["x-ms-seal-blob"] = _SERIALIZER.header("seal_blob", seal_blob, "bool") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_copy_from_url_request( + url: str, + *, + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + encryption_scope: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + x_ms_requires_sync: Literal["true"] = kwargs.pop("x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-requires-sync"] = _SERIALIZER.header("x_ms_requires_sync", x_ms_requires_sync, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if 
source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if copy_source_tags is not None: + _headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_abort_copy_from_url_request( + url: str, + *, + copy_id: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) + copy_action_abort_constant: Literal["abort"] = kwargs.pop( + "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") + ) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = 
_SERIALIZER.query("comp", comp, "str") + _params["copyid"] = _SERIALIZER.query("copy_id", copy_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-copy-action"] = _SERIALIZER.header("copy_action_abort_constant", copy_action_abort_constant, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_tier_request( + url: str, + *, + tier: Union[str, _models.AccessTierRequired], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + request_id_parameter: Optional[str] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if rehydrate_priority is not None: + _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_account_info_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", 
"properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_query_request( + url: str, + *, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if 
if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_tags_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_tags_request( + url: str, + *, + timeout: Optional[int] = None, + version_id: Optional[str] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = 
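
Unlike the surrounding GET/PUT builders, build_query_request carries a request body, so it sets Content-Type only when one was supplied and returns a POST. A sketch of the resulting request shape using only azure-core; the URL and the hand-written XML body are placeholders standing in for the serialized QueryRequest model:

    from azure.core.rest import HttpRequest

    # Placeholder body; the real operation serializes a QueryRequest model to XML.
    body = b"<QueryRequest><QueryType>SQL</QueryType></QueryRequest>"

    request = HttpRequest(
        method="POST",
        url="https://account.blob.core.windows.net/container/blob",
        params={"comp": "query"},
        headers={"Content-Type": "application/xml", "x-ms-version": "2025-01-05"},
        content=body,
    )
    print(request.method, request.headers["Content-Type"])  # POST application/xml
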
_headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +class BlobOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class BlobOperations(object): - """BlobOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace def download( self, - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - range=None, # type: Optional[str] - range_get_content_md5=None, # type: Optional[bool] - range_get_content_crc64=None, # type: Optional[bool] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> IO + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + range_get_content_md5: Optional[bool] = None, + range_get_content_crc64: Optional[bool] = None, + structured_body_type: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long """The Download operation reads or downloads a blob from the system, including its metadata and properties. You can also call Download to read a snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param range_get_content_md5: When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB - in size. + in size. Default value is None. :type range_get_content_md5: bool :param range_get_content_crc64: When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 - MB in size. + MB in size. Default value is None. 
:type range_get_content_crc64: bool - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + :param structured_body_type: Specifies the response content should be returned as a structured + message and specifies the message schema version and properties. Default value is None. + :type structured_body_type: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_download_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, 
+ timeout=timeout, + range=range, + lease_id=_lease_id, + range_get_content_md5=range_get_content_md5, + range_get_content_crc64=range_get_content_crc64, + structured_body_type=structured_body_type, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.download.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if range_get_content_md5 is not None: - header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("range_get_content_md5", range_get_content_md5, 'bool') - if range_get_content_crc64 is not None: - header_parameters['x-ms-range-get-content-crc64'] = self._serialize.header("range_get_content_crc64", range_get_content_crc64, 'bool') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - 
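
The rewritten download runs the pipeline with stream=True and hands back response.stream_download(...), so the body is consumed lazily rather than buffered. A sketch of the consumer-side shape, with a plain byte iterator standing in for the pipeline response:

    import io
    from typing import Iterator


    def stream_chunks(source: io.BytesIO, chunk_size: int = 4) -> Iterator[bytes]:
        # Stand-in for response.stream_download(pipeline, decompress=True):
        # yields the body incrementally instead of loading it into memory.
        while True:
            chunk = source.read(chunk_size)
            if not chunk:
                return
            yield chunk


    payload = io.BytesIO(b"blob-bytes")
    assert b"".join(stream_chunks(payload)) == b"blob-bytes"
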
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} if response.status_code == 200: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + 
response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - 
response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', 
response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - download.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - def get_properties( + @distributed_trace + def get_properties( # pylint: disable=inconsistent-return-statements self, - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None
+        snapshot: Optional[str] = None,
+        version_id: Optional[str] = None,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
        """The Get Properties operation returns all user-defined metadata, standard HTTP properties,
        and system properties for the blob. It does not return the content of the blob.

        :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
        specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
-        see :code:`Creating a Snapshot of
-        a Blob.`.
+        see :code:`Creating
+        a Snapshot of a Blob.`. Default value is None.
        :type snapshot: str
        :param version_id: The version id parameter is an opaque DateTime value that, when present,
        specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer.
+        Default value is None.
        :type version_id: str
        :param timeout: The timeout parameter is expressed in seconds. For more information, see
-        :code:`Setting Timeouts for Blob Service Operations.`.
+        :code:`Setting
+        Timeouts for Blob Service Operations.`. Default value is None.
        :type timeout: int
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-        the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-        provided.
-        :type encryption_algorithm: str
        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-        limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
        :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_info: Parameter group.
+        :param cpk_info: Parameter group. Default value is None.
        :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param modified_access_conditions: Parameter group.
+        :param modified_access_conditions: Parameter group. Default value is None.
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_properties_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', 
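
The error_map built above routes well-known status codes to specific azure-core exception types via map_error, with everything else surfacing as HttpResponseError carrying a failsafe-deserialized StorageError body. A compressed, stdlib-only sketch of that dispatch; the exception classes here are stand-ins for the azure.core.exceptions types:

    class ClientAuthenticationError(Exception): ...
    class ResourceNotFoundError(Exception): ...
    class HttpResponseError(Exception): ...

    # Specific codes get specific exception types; in this sketch anything
    # unmapped falls back to HttpResponseError directly.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError}


    def map_error(status_code: int) -> None:
        raise error_map.get(status_code, HttpResponseError)(f"status {status_code}")


    try:
        map_error(404)
    except ResourceNotFoundError as exc:
        print(exc)  # status 404
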
minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', 
response.headers.get('x-ms-copy-status')) - response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy')) - response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier')) - response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred')) - response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status')) - response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', 
response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-incremental-copy"] = self._deserialize( + "bool", response.headers.get("x-ms-incremental-copy") + ) + response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( + "str", response.headers.get("x-ms-copy-destination-snapshot") + ) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) + response_headers["x-ms-access-tier-inferred"] = self._deserialize( + "bool", response.headers.get("x-ms-access-tier-inferred") + ) + response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) + response_headers["x-ms-access-tier-change-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-access-tier-change-time") + ) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-rehydrate-priority"] = self._deserialize( + "str", response.headers.get("x-ms-rehydrate-priority") + ) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) - - get_properties.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def delete( + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements self, - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - delete_snapshots=None, # type: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] - request_id_parameter=None, # type: Optional[str] - blob_delete_type="Permanent", # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + request_id_parameter: Optional[str] = None, + blob_delete_type: Literal["Permanent"] = "Permanent", + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is permanently removed from the storage account. If the storage account's soft delete feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible immediately. However, the blob service retains the blob or snapshot for the number of days - specified by the DeleteRetentionPolicy section of [Storage service properties] (Set-Blob- - Service-Properties.md). After the specified number of days has passed, the blob's data is - permanently removed from the storage account. Note that you continue to be charged for the - soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify - the "include=deleted" query parameter to discover which blobs and snapshots have been soft - deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the "include=deleted" query parameter to discover which blobs and snapshots have been + soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a soft-deleted blob or snapshot cause the service to return an HTTP status code of 404 (ResourceNotFound). :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the following two options: include: Delete the base blob and all of its snapshots. only: Delete - only the blob's snapshots and not the blob itself. + only the blob's snapshots and not the blob itself. Known values are: "include" and "only". + Default value is None. :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to - permanently delete a blob if blob soft delete is enabled. + permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and + None. Default value is "Permanent". :type blob_delete_type: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -527,800 +2120,634 @@ def delete( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_delete_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + delete_snapshots=delete_snapshots, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_delete_type=blob_delete_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: 
Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if blob_delete_type is not None: - query_parameters['deletetype'] = self._serialize.query("blob_delete_type", blob_delete_type, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if delete_snapshots is not None: - header_parameters['x-ms-delete-snapshots'] = self._serialize.header("delete_snapshots", delete_snapshots, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_access_control( - self, - timeout=None, # type: Optional[int] - 
owner=None, # type: Optional[str] - group=None, # type: Optional[str] - posix_permissions=None, # type: Optional[str] - posix_acl=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Set the owner, group, permissions, or access control list for a blob. + @distributed_trace + def undelete( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """Undelete a blob that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param owner: Optional. The owner of the blob or directory. - :type owner: str - :param group: Optional. The owning group of the blob or directory. - :type group: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". - :type posix_acl: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "setAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_undelete_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if owner is not None: - header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str') - if group is not None: - header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_acl is not None: - header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.patch(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def get_access_control( + @distributed_trace + def set_expiry( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - upn=None, # type: Optional[bool] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Get the owner, group, permissions, or access control list for a blob. + expiry_options: Union[str, _models.BlobExpiryOptions], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + expires_on: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """Sets the time a blob will expire and be deleted. + :param expiry_options: Required. Indicates mode of the expiry time. Known values are: + "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. + :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. 
For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If - "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. - :type upn: bool :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param expires_on: The time to set the blob to expiry. Default value is None. + :type expires_on: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "getAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_expiry_request( + url=self._config.url, + expiry_options=expiry_options, + timeout=timeout, + request_id_parameter=request_id_parameter, + expires_on=expires_on, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", 
self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if upn is not None: - query_parameters['upn'] = self._serialize.query("upn", upn, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner')) - response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group')) - response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions')) - response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - get_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - def rename( + @distributed_trace + def set_http_headers( # pylint: disable=inconsistent-return-statements self, - rename_source, # type: str - timeout=None, # type: Optional[int] - path_rename_mode=None, # type: Optional[Union[str, "_models.PathRenameMode"]] - directory_properties=None, # type: Optional[str] - posix_permissions=None, # type: Optional[str] - posix_umask=None, # type: Optional[str] - source_lease_id=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - directory_http_headers=None, # type: Optional["_models.DirectoryHttpHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Rename a blob/file. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param rename_source: The file or directory to be renamed. The value must have the following - format: "/{filesysystem}/{path}". If "x-ms-properties" is specified, the properties will - overwrite the existing properties; otherwise, the existing properties will be preserved. - :type rename_source: str + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """The Set HTTP Headers operation sets system properties on the blob. + :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param path_rename_mode: Determines the behavior of the rename operation. - :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. 
This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. - :type posix_umask: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. - :type source_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param directory_http_headers: Parameter group. - :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_md5 = None + _blob_content_encoding = None + _blob_content_language = None _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding 
= directory_http_headers.content_encoding - _content_language = directory_http_headers.content_language - _content_disposition = directory_http_headers.content_disposition + _if_tags = None + _blob_content_disposition = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_type = blob_http_headers.blob_content_type if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - accept = "application/xml" + _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_http_headers_request( + url=self._config.url, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_md5=_blob_content_md5, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_content_disposition=_blob_content_disposition, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.rename.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if path_rename_mode is not None: - query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str') - if directory_properties is not None: - header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", 
posix_permissions, 'str') - if posix_umask is not None: - header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') - if _cache_control is not None: - header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str') - if _content_type is not None: - header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str') - if _content_encoding is not None: - header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') - if _content_language is not None: - header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') - if _content_disposition is not None: - header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if source_lease_id is not None: - header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [201]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', 
response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"]=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - rename.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def undelete( + @distributed_trace + def set_immutability_policy( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None - """Undelete a blob that was previously soft deleted. + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """The Set Immutability Policy operation sets the immutability policy on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None.
+ :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.undelete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + if_unmodified_since=_if_unmodified_since, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) if cls: - return cls(pipeline_response, None, response_headers) - - undelete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_expiry( + @distributed_trace + def delete_immutability_policy( # pylint: disable=inconsistent-return-statements self, - expiry_options, # type: Union[str, "_models.BlobExpiryOptions"] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - expires_on=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None - """Sets the time a blob will expire and be deleted. + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """The Delete Immutability Policy operation deletes the immutability policy on the blob. - :param expiry_options: Required. Indicates mode of the expiry time. - :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param expires_on: The time to set the blob to expiry. 
- :type expires_on: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - comp = "expiry" - accept = "application/xml" - - # Construct URL - url = self.set_expiry.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') - if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_delete_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if 
response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_expiry.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_http_headers( + @distributed_trace + def set_legal_hold( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """The Set HTTP Headers operation sets system properties on the blob. - + legal_hold: bool, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """The Set Legal Hold operation sets a legal hold on the blob. + + :param legal_hold: Specifies if a legal hold should be set on the blob. Required. + :type legal_hold: bool :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_http_headers: Parameter group. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group.
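Editor's note: delete_immutability_policy, completed above, is the symmetric teardown call; per its docstring, snapshot and version_id can scope the request to a particular snapshot or blob version. A sketch, reusing the hypothetical client from the earlier example:

    # Reuses `client` from the set_immutability_policy sketch above (an assumption).
    client.blob.delete_immutability_policy()
    # Or target a specific blob version instead of the current one; the value is illustrative.
    client.blob.delete_immutability_policy(version_id="2024-12-20T18:05:52.0000000Z")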
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_md5 = None - _blob_content_encoding = None - _blob_content_language = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _blob_content_disposition = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_disposition = blob_http_headers.blob_content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_legal_hold_request( + url=self._config.url, + legal_hold=legal_hold, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_http_headers.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", 
self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_http_headers.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def set_metadata( + @distributed_trace + def set_metadata( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more name-value pairs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -1328,687 +2755,663 @@ def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. 
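Editor's note: set_legal_hold, completed just above, takes a single required boolean, and the refactored response handling surfaces the service's confirmation through the x-ms-legal-hold header. A sketch under the same assumptions as the earlier examples:

    # Reuses the hypothetical `client` from the first sketch.
    client.blob.set_legal_hold(legal_hold=True)   # place the hold
    client.blob.set_legal_hold(legal_hold=False)  # clear it again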
+ :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "metadata" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_metadata_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, 
+ encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, 
pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - set_metadata.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def acquire_lease( + @distributed_trace + def acquire_lease( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - duration=None, # type: Optional[int] - proposed_lease_id=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. 
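Editor's note: a notable typing change in the set_metadata hunk above is that metadata is now accepted as dict[str, str] rather than a pre-serialized string, and encryption_algorithm is read from CpkInfo instead of being a standalone parameter. A sketch, where the customer-provided-key values are placeholders:

    # Reuses the hypothetical `client`; metadata is now a mapping, not a string.
    from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated.models import (
        CpkInfo,
    )

    client.blob.set_metadata(metadata={"owner": "checkpointstore", "env": "test"})
    # Optionally encrypt with a customer-provided key (placeholder values shown).
    client.blob.set_metadata(
        metadata={"owner": "checkpointstore"},
        cpk_info=CpkInfo(
            encryption_key="<base64-key>",
            encryption_key_sha256="<base64-sha256>",
            encryption_algorithm="AES256",
        ),
    )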
Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. + duration cannot be changed using renew or change. Default value is None. :type duration: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Default value is None. :type proposed_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "acquire" - accept = "application/xml" - - # Construct URL - url = self.acquire_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # 
Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if duration is not None: - header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') - if proposed_lease_id is not None: - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def release_lease( + @distributed_trace + def release_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
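Editor's note: acquire_lease, completed above, maps comp="lease" and action="acquire" onto the new build_acquire_lease_request helper and returns the granted lease ID in the x-ms-lease-id response header. A sketch (per the docstring, duration must be 15 to 60 seconds, or -1 for an infinite lease):

    import uuid

    # Reuses the hypothetical `client` from the first sketch.
    lease_id = str(uuid.uuid4())
    client.blob.acquire_lease(duration=15, proposed_lease_id=lease_id)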
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", 
request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def renew_lease( + @distributed_trace + def renew_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. 
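Editor's note: release_lease, completed above, requires the current lease ID and frees the lock immediately. A sketch, continuing from the acquire example:

    # `lease_id` comes from the acquire_lease sketch above (an assumption).
    client.blob.release_lease(lease_id=lease_id)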
- :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_renew_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - renew_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def change_lease( + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - proposed_lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Required. :type proposed_lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
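Editor's note: renew_lease, completed above, extends the lock under the same lease ID and can be gated on conditional headers through ModifiedAccessConditions. A sketch under the same assumptions:

    from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated.models import (
        ModifiedAccessConditions,
    )

    # Renew only if the blob's tags still match; if_tags is a SQL-like filter expression.
    client.blob.renew_lease(
        lease_id=lease_id,
        modified_access_conditions=ModifiedAccessConditions(if_tags="\"env\" = 'test'"),
    )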
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "change" - accept = "application/xml" - - # Construct URL - url = self.change_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not 
None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_change_lease_request( + url=self._config.url, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - change_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def break_lease( + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - break_period=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
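For reference, a minimal sketch of driving the regenerated change_lease operation directly. It assumes `client` is an already-constructed generated AzureBlobStorage client scoped to a blob URL (in normal use the vendored BlobLeaseClient wrapper drives this layer); the import path and the `on_headers` callback name are illustrative, not part of this patch.

    import uuid
    # Hypothetical import path for the vendored generated models; adjust to
    # wherever the vendored package lives in your environment.
    from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated import models as _models

    def on_headers(pipeline_response, deserialized, response_headers):
        # The operation itself returns None; the response headers carry the result.
        return response_headers

    headers = client.blob.change_lease(
        lease_id="00000000-0000-0000-0000-000000000000",  # current lease (placeholder)
        proposed_lease_id=str(uuid.uuid4()),              # must be a valid GUID string
        modified_access_conditions=_models.ModifiedAccessConditions(if_match='"0x8D4BCC2E4835CD0"'),
        cls=on_headers,
    )
    new_lease_id = headers["x-ms-lease-id"]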
-    def break_lease(
+    @distributed_trace
+    def break_lease(  # pylint: disable=inconsistent-return-statements
         self,
-        timeout=None,  # type: Optional[int]
-        break_period=None,  # type: Optional[int]
-        request_id_parameter=None,  # type: Optional[str]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        timeout: Optional[int] = None,
+        break_period: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and
         delete operations.

         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-        :code:`Setting Timeouts for Blob Service Operations.`.
+        :code:`Setting
+        Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param break_period: For a break operation, proposed duration the lease should continue before
         it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter
         than the time remaining on the lease. If longer, the time remaining on the lease is used. A new
         lease will not be available before the break period has expired, but the lease may be held for
-        longer than the break period. If this header does not appear with a break operation, a fixed-
-        duration lease breaks after the remaining lease period elapses, and an infinite lease breaks
-        immediately.
+        longer than the break period. If this header does not appear with a break operation, a
+        fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease
+        breaks immediately. Default value is None.
         :type break_period: int
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-        limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
         :type request_id_parameter: str
-        :param modified_access_conditions: Parameter group.
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+        action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _if_modified_since = None
         _if_unmodified_since = None
         _if_match = None
         _if_none_match = None
         _if_tags = None
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        comp = "lease"
-        action = "break"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.break_lease.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str')
-        if break_period is not None:
-            header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_break_lease_request(
+            url=self._config.url,
+            timeout=timeout,
+            break_period=break_period,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            comp=comp,
+            action=action,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [202]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    break_lease.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
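In the same spirit, a sketch of break_lease under the same assumptions (`client` is a generated AzureBlobStorage instance, `on_headers` as above). `break_period` is the only parameter unique to this lease action:

    # Ask the service to keep the lease alive for at most 10 more seconds
    # before it is considered broken; x-ms-lease-time reports the time remaining.
    headers = client.blob.break_lease(break_period=10, cls=on_headers)
    remaining = headers["x-ms-lease-time"]  # seconds until the break completes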
Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Create Snapshot operation creates a read-only snapshot of a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2016,36 +3419,41 @@ def create_snapshot( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -2054,114 +3462,103 @@ def create_snapshot( _if_tags = None _lease_id = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "snapshot" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_create_snapshot_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create_snapshot.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if 
timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-request-server-encrypted')) + response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) - - create_snapshot.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def start_copy_from_url( + @distributed_trace + def start_copy_from_url( # pylint: disable=inconsistent-return-statements self, - copy_source, # type: str - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] - rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - seal_blob=None, # type: Optional[bool] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + seal_blob: Optional[bool] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Start Copy From URL operation copies a blob or an internet resource to a new blob. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. 
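Two signature changes above are easy to miss: `metadata` is now a dict[str, str] rather than a pre-serialized string, and `encryption_algorithm` is no longer a standalone keyword but is read from the CpkInfo parameter group. A hedged sketch under the same assumed `client`, `_models`, and `on_headers` as above; the key bytes are placeholders:

    import base64
    import hashlib

    key = b"0" * 32  # placeholder 256-bit customer-provided key
    cpk = _models.CpkInfo(
        encryption_key=base64.b64encode(key).decode(),
        encryption_key_sha256=base64.b64encode(hashlib.sha256(key).digest()).decode(),
        encryption_algorithm="AES256",  # previously a separate keyword argument
    )
    headers = client.blob.create_snapshot(
        metadata={"origin": "checkpoint"},  # now a dict, serialized by the request builder
        cpk_info=cpk,
        cls=on_headers,
    )
    snapshot_id = headers["x-ms-snapshot"]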
-    def start_copy_from_url(
+    @distributed_trace
+    def start_copy_from_url(  # pylint: disable=inconsistent-return-statements
         self,
-        copy_source,  # type: str
-        timeout=None,  # type: Optional[int]
-        metadata=None,  # type: Optional[str]
-        tier=None,  # type: Optional[Union[str, "_models.AccessTierOptional"]]
-        rehydrate_priority=None,  # type: Optional[Union[str, "_models.RehydratePriority"]]
-        request_id_parameter=None,  # type: Optional[str]
-        blob_tags_string=None,  # type: Optional[str]
-        seal_blob=None,  # type: Optional[bool]
-        source_modified_access_conditions=None,  # type: Optional["_models.SourceModifiedAccessConditions"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        copy_source: str,
+        timeout: Optional[int] = None,
+        metadata: Optional[Dict[str, str]] = None,
+        tier: Optional[Union[str, _models.AccessTierOptional]] = None,
+        rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None,
+        request_id_parameter: Optional[str] = None,
+        blob_tags_string: Optional[str] = None,
+        seal_blob: Optional[bool] = None,
+        immutability_policy_expiry: Optional[datetime.datetime] = None,
+        immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+        legal_hold: Optional[bool] = None,
+        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """The Start Copy From URL operation copies a blob or an internet resource to a new blob.

         :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of
         up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it
         would appear in a request URI. The source blob must either be public or must be authenticated
-        via a shared access signature.
+        via a shared access signature. Required.
         :type copy_source: str
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-        :code:`Setting Timeouts for Blob Service Operations.`.
+        :code:`Setting
+        Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob.
         If no name-value pairs are specified, the operation will copy the metadata from the source blob
@@ -2169,38 +3566,57 @@ def start_copy_from_url(
         blob is created with the specified metadata, and metadata is not copied from the source blob or
         file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming
         rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more
-        information.
-        :type metadata: str
-        :param tier: Optional. Indicates the tier to be set on the blob.
+        information. Default value is None.
+        :type metadata: dict[str, str]
+        :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6",
+        "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and
+        "Cold". Default value is None.
         :type tier: str or ~azure.storage.blob.models.AccessTierOptional
         :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived
-        blob.
+        blob. Known values are: "High" and "Standard". Default value is None.
         :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-        limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
         :type request_id_parameter: str
-        :param blob_tags_string: Optional. Used to set blob tags in various blob operations.
+        :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+        value is None.
         :type blob_tags_string: str
         :param seal_blob: Overrides the sealed state of the destination blob. Service version
-        2019-12-12 and newer.
+        2019-12-12 and newer. Default value is None.
         :type seal_blob: bool
-        :param source_modified_access_conditions: Parameter group.
-        :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy
+        is set to expire. Default value is None.
+        :type immutability_policy_expiry: ~datetime.datetime
+        :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+        Known values are: "Mutable", "Unlocked", and "Locked". Default value is None.
+        :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+        :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None.
+        :type legal_hold: bool
+        :param source_modified_access_conditions: Parameter group. Default value is None.
+        :type source_modified_access_conditions:
+        ~azure.storage.blob.models.SourceModifiedAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
        }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
        _source_if_modified_since = None
         _source_if_unmodified_since = None
         _source_if_match = None
@@ -2212,125 +3628,113 @@ def start_copy_from_url(
         _if_none_match = None
         _if_tags = None
         _lease_id = None
-        if lease_access_conditions is not None:
-            _lease_id = lease_access_conditions.lease_id
-        if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
-            _if_match = modified_access_conditions.if_match
-            _if_none_match = modified_access_conditions.if_none_match
-            _if_tags = modified_access_conditions.if_tags
         if source_modified_access_conditions is not None:
-            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
-            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
             _source_if_match = source_modified_access_conditions.source_if_match
+            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
             _source_if_none_match = source_modified_access_conditions.source_if_none_match
             _source_if_tags = source_modified_access_conditions.source_if_tags
-        accept = "application/xml"
+            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
+        if modified_access_conditions is not None:
+            _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_none_match = modified_access_conditions.if_none_match
+            _if_tags = modified_access_conditions.if_tags
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+
+        _request = build_start_copy_from_url_request(
+            url=self._config.url,
+            copy_source=copy_source,
+            timeout=timeout,
+            metadata=metadata,
+            tier=tier,
+            rehydrate_priority=rehydrate_priority,
+            source_if_modified_since=_source_if_modified_since,
+            source_if_unmodified_since=_source_if_unmodified_since,
+            source_if_match=_source_if_match,
+            source_if_none_match=_source_if_none_match,
+            source_if_tags=_source_if_tags,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            lease_id=_lease_id,
+            request_id_parameter=request_id_parameter,
+            blob_tags_string=blob_tags_string,
+            seal_blob=seal_blob,
+            immutability_policy_expiry=immutability_policy_expiry,
+            immutability_policy_mode=immutability_policy_mode,
+            legal_hold=legal_hold,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )

-        # Construct URL
-        url = self.start_copy_from_url.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if metadata is not None:
-            header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str')
-        if tier is not None:
-            header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str')
-        if rehydrate_priority is not None:
-            header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str')
-        if _source_if_modified_since is not None:
-            header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
-        if _source_if_unmodified_since is not None:
-            header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
-        if _source_if_match is not None:
-            header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
-        if _source_if_none_match is not None:
-            header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
-        if _source_if_tags is not None:
-            header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        if blob_tags_string is not None:
-            header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
-        if seal_blob is not None:
-            header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", seal_blob, 'bool')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [202]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id'))
-        response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+        response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
-
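A sketch of the regenerated start_copy_from_url, which now also accepts the immutability-policy and legal-hold parameters added above. Same assumptions as in the earlier sketches; the source URL is a placeholder:

    headers = client.blob.start_copy_from_url(
        copy_source="https://account.blob.core.windows.net/src/blob?<sas>",  # placeholder
        tier="Cool",
        seal_blob=False,
        legal_hold=False,  # new in this version of the generated client
        cls=on_headers,
    )
    # The copy is asynchronous: poll x-ms-copy-status until it leaves "pending".
    copy_id = headers["x-ms-copy-id"]
    status = headers["x-ms-copy-status"]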
-    def copy_from_url(
+    @distributed_trace
+    def copy_from_url(  # pylint: disable=inconsistent-return-statements
         self,
-        copy_source,  # type: str
-        timeout=None,  # type: Optional[int]
-        metadata=None,  # type: Optional[str]
-        tier=None,  # type: Optional[Union[str, "_models.AccessTierOptional"]]
-        request_id_parameter=None,  # type: Optional[str]
-        source_content_md5=None,  # type: Optional[bytearray]
-        blob_tags_string=None,  # type: Optional[str]
-        source_modified_access_conditions=None,  # type: Optional["_models.SourceModifiedAccessConditions"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        copy_source: str,
+        timeout: Optional[int] = None,
+        metadata: Optional[Dict[str, str]] = None,
+        tier: Optional[Union[str, _models.AccessTierOptional]] = None,
+        request_id_parameter: Optional[str] = None,
+        source_content_md5: Optional[bytes] = None,
+        blob_tags_string: Optional[str] = None,
+        immutability_policy_expiry: Optional[datetime.datetime] = None,
+        immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+        legal_hold: Optional[bool] = None,
+        copy_source_authorization: Optional[str] = None,
+        copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None,
+        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not
         return a response until the copy is complete.

         :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of
         up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it
         would appear in a request URI. The source blob must either be public or must be authenticated
-        via a shared access signature.
+        via a shared access signature. Required.
         :type copy_source: str
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-        :code:`Setting Timeouts for Blob Service Operations.`.
+        :code:`Setting
+        Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob.
         If no name-value pairs are specified, the operation will copy the metadata from the source blob
@@ -2338,35 +3742,66 @@ def copy_from_url(
         blob is created with the specified metadata, and metadata is not copied from the source blob or
         file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming
         rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more
-        information.
-        :type metadata: str
-        :param tier: Optional. Indicates the tier to be set on the blob.
+        information. Default value is None.
+        :type metadata: dict[str, str]
+        :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6",
+        "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and
+        "Cold". Default value is None.
         :type tier: str or ~azure.storage.blob.models.AccessTierOptional
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-        limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
         :type request_id_parameter: str
         :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read
-        from the copy source.
-        :type source_content_md5: bytearray
-        :param blob_tags_string: Optional. Used to set blob tags in various blob operations.
+        from the copy source. Default value is None.
+        :type source_content_md5: bytes
+        :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+        value is None.
         :type blob_tags_string: str
-        :param source_modified_access_conditions: Parameter group.
-        :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy
+        is set to expire. Default value is None.
+        :type immutability_policy_expiry: ~datetime.datetime
+        :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+        Known values are: "Mutable", "Unlocked", and "Locked". Default value is None.
+        :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+        :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None.
+        :type legal_hold: bool
+        :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
+        OAuth access token to copy source. Default value is None.
+        :type copy_source_authorization: str
+        :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be
+        copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and
+        "COPY". Default value is None.
+        :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags
+        :param source_modified_access_conditions: Parameter group. Default value is None.
+        :type source_modified_access_conditions:
+        ~azure.storage.blob.models.SourceModifiedAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :param cpk_scope_info: Parameter group. Default value is None.
+        :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
        }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        x_ms_requires_sync: Literal["true"] = kwargs.pop(
+            "x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true")
+        )
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
        _source_if_modified_since = None
         _source_if_unmodified_since = None
         _source_if_match = None
@@ -2377,787 +3812,829 @@ def copy_from_url(
         _if_none_match = None
         _if_tags = None
         _lease_id = None
-        if lease_access_conditions is not None:
-            _lease_id = lease_access_conditions.lease_id
+        _encryption_scope = None
+        if source_modified_access_conditions is not None:
+            _source_if_match = source_modified_access_conditions.source_if_match
+            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
+            _source_if_none_match = source_modified_access_conditions.source_if_none_match
+            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        if source_modified_access_conditions is not None:
-            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
-            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
-            _source_if_match = source_modified_access_conditions.source_if_match
-            _source_if_none_match = source_modified_access_conditions.source_if_none_match
-        x_ms_requires_sync = "true"
-        accept = "application/xml"
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if cpk_scope_info is not None:
+            _encryption_scope = cpk_scope_info.encryption_scope
+
+        _request = build_copy_from_url_request(
+            url=self._config.url,
+            copy_source=copy_source,
+            timeout=timeout,
+            metadata=metadata,
+            tier=tier,
+            source_if_modified_since=_source_if_modified_since,
+            source_if_unmodified_since=_source_if_unmodified_since,
+            source_if_match=_source_if_match,
+            source_if_none_match=_source_if_none_match,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            lease_id=_lease_id,
+            request_id_parameter=request_id_parameter,
+            source_content_md5=source_content_md5,
+            blob_tags_string=blob_tags_string,
+            immutability_policy_expiry=immutability_policy_expiry,
+            immutability_policy_mode=immutability_policy_mode,
+            legal_hold=legal_hold,
+            copy_source_authorization=copy_source_authorization,
+            encryption_scope=_encryption_scope,
+            copy_source_tags=copy_source_tags,
+            x_ms_requires_sync=x_ms_requires_sync,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )

-        # Construct URL
-        url = self.copy_from_url.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-requires-sync'] = self._serialize.header("x_ms_requires_sync", x_ms_requires_sync, 'str')
-        if metadata is not None:
-            header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str')
-        if tier is not None:
-            header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str')
-        if _source_if_modified_since is not None:
-            header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
-        if _source_if_unmodified_since is not None:
-            header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
-        if _source_if_match is not None:
-            header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
-        if _source_if_none_match is not None:
-            header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        if source_content_md5 is not None:
-            header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray')
-        if blob_tags_string is not None:
-            header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [202]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id'))
-        response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+        response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    copy_from_url.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
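copy_from_url is the synchronous variant (the x-ms-requires-sync: true header is now injected via the request builder rather than hard-coded in the method body) and gains copy_source_authorization, copy_source_tags, and an encryption scope via cpk_scope_info. A sketch under the same assumptions as above; the bearer token is a placeholder:

    client.blob.copy_from_url(
        copy_source="https://account.blob.core.windows.net/src/blob",  # placeholder
        copy_source_authorization="Bearer <oauth-access-token>",  # placeholder token
        copy_source_tags="COPY",  # carry the source blob's tags over
        cpk_scope_info=_models.CpkScopeInfo(encryption_scope="myscope"),
    )
    # No cls callback here: the call simply returns None once the copy has completed.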
Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + copy_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination blob with zero length and full metadata. :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy - Blob operation. + Blob operation. Required. :type copy_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) + copy_action_abort_constant: Literal["abort"] = kwargs.pop( + "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - comp = "copy" - copy_action_abort_constant = "abort" - accept = "application/xml" - - # Construct URL - url = self.abort_copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['copyid'] = self._serialize.query("copy_id", copy_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-action'] = 
self._serialize.header("copy_action_abort_constant", copy_action_abort_constant, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_abort_copy_from_url_request( + url=self._config.url, + copy_id=copy_id, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + copy_action_abort_constant=copy_action_abort_constant, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_tier( + @distributed_trace + def set_tier( # pylint: disable=inconsistent-return-statements self, - tier, # type: Union[str, "_models.AccessTierRequired"] - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + tier: Union[str, _models.AccessTierRequired], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium storage account and on a block blob in a blob storage account (locally redundant storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. - :param tier: Indicates the tier to be set on the blob. + :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". + Required. :type tier: str or ~azure.storage.blob.models.AccessTierRequired :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. + blob. Known values are: "High" and "Standard". Default value is None. :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
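# Editor's aside: the signature rewrite applied throughout this file, reduced
# to a toy example. Python 2 style "# type:" comments become inline
# annotations and each public operation gains @distributed_trace. This
# function is illustrative only; "AccessTierRequired" is a forward string
# reference so the sketch stays self-contained.
from typing import Any, Optional, Union

from azure.core.tracing.decorator import distributed_trace


@distributed_trace
def set_tier_example(tier: Union[str, "AccessTierRequired"], timeout: Optional[int] = None, **kwargs: Any) -> None:
    """Toy stand-in for the generated operation; only the shape matters here."""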
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tier" - accept = "application/xml" - # Construct URL - url = self.set_tier.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_set_tier_request( + url=self._config.url, + tier=tier, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_tags=_if_tags, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + 
_request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - - if response.status_code == 202: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) - - set_tier.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def get_account_info( - self, - **kwargs # type: Any - ): - # type: (...) -> None + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
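# Editor's aside: the error-handling shape every regenerated operation now
# shares, shown in isolation (assumes a reasonably recent azure-core). Two
# changes: 304 maps to ResourceNotModifiedError, and the XML error body is
# parsed with failsafe_deserialize, which returns None on a malformed payload
# instead of raising and masking the original HTTP failure.
from typing import MutableMapping, Type

from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
)

error_map: MutableMapping[int, Type[HttpResponseError]] = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
    304: ResourceNotModifiedError,
}
# In the operations above this pairs with:
#     error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
#     raise HttpResponseError(response=response, model=error)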
+ :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers["x-ms-client-request-id"] = self._deserialize( + 
"str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def query( self, - snapshot=None, # type: Optional[str] - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - query_request=None, # type: Optional["_models.QueryRequest"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> IO + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + query_request: Optional[_models.QueryRequest] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long """The Query operation enables users to select/project on blob data by providing simple query expressions. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param query_request: the query request. - :type query_request: ~azure.storage.blob.models.QueryRequest - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. 
:type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :param query_request: the query request. Default value is None. + :type query_request: ~azure.storage.blob.models.QueryRequest + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "query" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.query.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - 
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + _if_unmodified_since = modified_access_conditions.if_unmodified_since if query_request is not None: - body_content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + _content = self._serialize.body(query_request, "QueryRequest", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + _content = None + + _request = build_query_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} 
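# Editor's aside: because query() now runs the pipeline with stream=True, a
# failed response still has an unread body; the code above drains it (and
# closes the socket) before mapping the error, swallowing the two stream-state
# exceptions in case the body was already consumed or closed. A condensed
# sketch; `response` stands in for azure-core's streaming HttpResponse.
from azure.core.exceptions import StreamClosedError, StreamConsumedError


def drain_on_error(response) -> None:
    if response.status_code not in (200, 206):
        try:
            response.read()  # load the body into memory and close the socket
        except (StreamConsumedError, StreamClosedError):
            pass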
if response.status_code == 200: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = 
response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + 
response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - 
response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] 
= self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - query.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore + @distributed_trace def get_tags( self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> "_models.BlobTags" + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> _models.BlobTags: + # pylint: disable=line-too-long """The Get Tags operation enables users to get the tags associated with a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
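# Editor's aside: query()'s return type changed from IO to Iterator[bytes]
# (via stream_download with a controllable decompress flag), so a caller now
# drains chunks rather than reading a file-like object. Illustrative only:
from typing import Iterator


def drain(chunks: Iterator[bytes]) -> bytes:
    # Join the streamed chunks; a real caller might instead re-stream them.
    return b"".join(chunks)


assert drain(iter([b"col1,col2\n", b"1,2\n"])) == b"col1,col2\n1,2\n"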
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlobTags, or the result of cls(response) + :return: BlobTags or the result of cls(response) :rtype: ~azure.storage.blob.models.BlobTags - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - accept = "application/xml" + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_get_tags_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response 
= self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlobTags', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlobTags", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - def set_tags( + @distributed_trace + def set_tags( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - version_id=None, # type: Optional[str] - transactional_content_md5=None, # type: Optional[bytearray] - transactional_content_crc64=None, # type: Optional[bytearray] - request_id_parameter=None, # type: Optional[str] - tags=None, # type: Optional["_models.BlobTags"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + version_id: Optional[str] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + tags: Optional[_models.BlobTags] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Set Tags operation enables users to set tags on a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. 
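# Editor's aside: the set_tags signature just above narrows
# transactional_content_md5/_crc64 from bytearray to bytes. One way a caller
# would now produce the MD5 value, sketched with hashlib; this usage is the
# editor's illustration, not part of the patch.
import hashlib

tags_xml = b"<Tags><TagSet/></Tags>"  # hypothetical serialized BlobTags body
transactional_content_md5: bytes = hashlib.md5(tags_xml).digest()  # raw 16 bytes, not hex
assert len(transactional_content_md5) == 16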
Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param tags: Blob tags. - :type tags: ~azure.storage.blob.models.BlobTags - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param tags: Blob tags. Default value is None. + :type tags: ~azure.storage.blob.models.BlobTags + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - 
if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if tags is not None: - body_content = self._serialize.body(tags, 'BlobTags', is_xml=True) + _content = self._serialize.body(tags, "BlobTags", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_tags_request( + url=self._config.url, + timeout=timeout, + version_id=version_id, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + request_id_parameter=request_id_parameter, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return 
cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_block_blob_operations.py index 7bb13abc2b7f..b0c20b665f1e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_block_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_block_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,120 +7,792 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_upload_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[bytes] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = 
_SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, 
"str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_put_blob_from_url_request( + url: str, + *, + content_length: int, + copy_source: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + 
_headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if source_if_tags is not None: + _headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if copy_source_blob_properties is not None: + _headers["x-ms-copy-source-blob-properties"] = _SERIALIZER.header( + "copy_source_blob_properties", copy_source_blob_properties, "bool" + ) + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + if copy_source_tags is not None: + 
_headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_stage_block_request( + url: str, + *, + block_id: str, + content_length: int, + content: IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + 
"structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_stage_block_from_url_request( + url: str, + *, + block_id: str, + content_length: int, + source_url: str, + source_range: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") + if source_range is not None: + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if source_contentcrc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( + "source_contentcrc64", source_contentcrc64, "bytearray" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", 
source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_commit_block_list_request( + url: str, + *, + content: Any, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if blob_content_type is not 
None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_block_list_request( + url: str, + *, + snapshot: Optional[str] = None, + list_type: Union[str, _models.BlockListType] = "committed", + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + _params["blocklisttype"] = _SERIALIZER.query("list_type", list_type, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class BlockBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class BlockBlobOperations(object): - """BlockBlobOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`block_blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def upload( + @distributed_trace + def upload( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - body, # type: IO - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[bytes] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Upload Block Blob operation updates the content of an existing block blob. Updating an existing block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a partial update of the content of a block blob, use the Put Block List operation. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytes + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group.
+ :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -129,6 +802,7 @@ def upload( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -136,210 +810,215 @@ def upload( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = 
modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "BlockBlob" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.upload.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = 
self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_upload_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + transactional_content_crc64=transactional_content_crc64, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + blob_type=blob_type, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - 
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - upload.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def put_blob_from_url( + @distributed_trace + def put_blob_from_url( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - copy_source, # type: str - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] - request_id_parameter=None, # type: Optional[str] - source_content_md5=None, # type: Optional[bytearray] - blob_tags_string=None, # type: Optional[str] - copy_source_blob_properties=None, # type: Optional[bool] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - 
**kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + copy_source: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are not supported with Put Blob from URL; the content of an existing blob is overwritten with the content of the new blob. To perform partial updates to a block blob’s contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. 
+ :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + from the copy source. Default value is None. + :type source_content_md5: bytes + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str :param copy_source_blob_properties: Optional, default is true. Indicates if properties from - the source blob should be copied. + the source blob should be copied. Default value is None. :type copy_source_blob_properties: bool - :param blob_http_headers: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and + "COPY". Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
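# A minimal usage sketch of the regenerated put_blob_from_url signature, with
# keyword typing in place of the old comment annotations. `client` is an
# assumed, already-authenticated AzureBlobStorage generated client scoped to
# the destination blob; the source URL is a hypothetical SAS-protected blob.
source_url = "https://account.blob.core.windows.net/src/data.bin?<sas>"
client.block_blob.put_blob_from_url(
    content_length=0,                  # the request itself carries no body
    copy_source=source_url,            # contents are read server-side from this URL
    copy_source_blob_properties=True,  # keep the source blob's HTTP properties
    metadata={"origin": "copy"},       # now typed as dict[str, str] rather than str
)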
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -349,6 +1028,7 @@ def put_blob_from_url( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -361,345 +1041,344 @@ def put_blob_from_url( _source_if_none_match = None _source_if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags - blob_type = "BlockBlob" - accept = 
"application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_put_blob_from_url_request( + url=self._config.url, + content_length=content_length, + copy_source=copy_source, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.put_blob_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - 
header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _source_if_tags is not None: - header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if copy_source_blob_properties is not None: - header_parameters['x-ms-copy-source-blob-properties'] = self._serialize.header("copy_source_blob_properties", copy_source_blob_properties, 'bool') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - put_blob_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def stage_block( + @distributed_trace + def stage_block( # pylint: disable=inconsistent-return-statements self, - block_id, # type: str - content_length, # type: int - body, # type: IO - transactional_content_md5=None, # type: Optional[bytearray] - transactional_content_crc64=None, # type: Optional[bytearray] - 
timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - **kwargs # type: Any - ): - # type: (...) -> None + block_id: str, + content_length: int, + body: IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the string must be less than or equal to 64 bytes in size. For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. + value specified for the blockid parameter must be the same size for each block. Required. :type block_id: str - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. 
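# A minimal sketch of staging one block under the new signature: transactional
# hashes are now `bytes` (previously `bytearray`) and the body is IO[bytes].
# `client` and the local file name are assumptions for illustration.
import base64
import os

block_id = base64.b64encode(b"block-000001").decode("ascii")  # Base64, <= 64 bytes before encoding
size = os.path.getsize("chunk.bin")
with open("chunk.bin", "rb") as body:
    client.block_blob.stage_block(block_id=block_id, content_length=size, body=body)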
+ :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - comp = "block" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.stage_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - 
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _content = body + + _request = build_stage_block_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - stage_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def stage_block_from_url( + @distributed_trace + def stage_block_from_url( # pylint: disable=inconsistent-return-statements self, - block_id, # type: str - content_length, # type: int - source_url, # type: str - source_range=None, # type: Optional[str] - source_content_md5=None, # type: Optional[bytearray] - source_contentcrc64=None, # type: Optional[bytearray] - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + block_id: str, + content_length: int, + source_url: str, + source_range: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob where the contents are read from a URL. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the string must be less than or equal to 64 bytes in size. 
For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. + value specified for the blockid parameter must be the same size for each block. Required. :type block_id: str - :param content_length: The length of the request. - :type content_length: long - :param source_url: Specify a URL to the copy source. + :param content_length: The length of the request. Required. + :type content_length: int + :param source_url: Specify a URL to the copy source. Required. :type source_url: str - :param source_range: Bytes of source data in the specified range. + :param source_range: Bytes of source data in the specified range. Default value is None. :type source_range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. Default value is None. + :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
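# A hedged sketch of the URL-sourced variant: the request itself has no body
# (content_length describes the empty request payload), and this regeneration
# surfaces copy_source_authorization for bearer-token access to the source.
# `client` and the URL/token values are assumptions, not taken from the diff.
client.block_blob.stage_block_from_url(
    block_id="YmxvY2stMDAwMDAx",       # Base64 of b"block-000001"
    content_length=0,
    source_url="https://account.blob.core.windows.net/src/data.bin",  # hypothetical source
    source_range="bytes=0-4194303",    # first 4 MiB of the source
    copy_source_authorization="Bearer <oauth-token>",  # placeholder; supply a real OAuth token
)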
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _source_if_modified_since = None @@ -707,6 +1386,7 @@ def stage_block_from_url( _source_if_match = None _source_if_none_match = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: @@ -714,104 +1394,96 @@ def stage_block_from_url( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "block" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_stage_block_from_url_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + source_url=source_url, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.stage_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', 
response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def commit_block_list( + @distributed_trace + def commit_block_list( # pylint: disable=inconsistent-return-statements self, - blocks, # type: "_models.BlockLookupList" - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - transactional_content_crc64=None, # type: Optional[bytearray] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + blocks: _models.BlockLookupList, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. In order to be written as part of a blob, a block must have been successfully written to the server in a prior Put Block operation. You can call Put Block List to update a blob by @@ -820,58 +1492,75 @@ def commit_block_list( or from the uncommitted block list, or to commit the most recently uploaded version of the block, whichever list it may belong to. - :param blocks: + :param blocks: Blob Blocks. Required. :type blocks: ~azure.storage.blob.models.BlockLookupList :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. 
:type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date and time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None.
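# A minimal sketch of committing staged blocks, with the new immutability
# keywords available but omitted. BlockLookupList is the generated model;
# `latest` commits the most recently uploaded version of each id. The import
# below uses the upstream path; in this package the same model is vendored
# under the checkpointstoreblob _vendor.storage.blob._generated.models tree.
from azure.storage.blob._generated.models import BlockLookupList

client.block_blob.commit_block_list(
    blocks=BlockLookupList(latest=["YmxvY2stMDAwMDAx"]),
    metadata={"assembled": "true"},  # dict[str, str] under the new typing
    tier="Hot",
)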
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_cache_control = None _blob_content_type = None _blob_content_encoding = None @@ -881,6 +1570,7 @@ def commit_block_list( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -889,225 +1579,210 @@ def commit_block_list( _if_tags = None if blob_http_headers is not None: _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.commit_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - 
if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - 
header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = self._serialize.body(blocks, "BlockLookupList", is_xml=True) + + _request = build_commit_block_list_request( + url=self._config.url, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - 
response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - commit_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def get_block_list( self, - snapshot=None, # type: Optional[str] - list_type="committed", # type: Union[str, "_models.BlockListType"] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> "_models.BlockList" + snapshot: Optional[str] = None, + list_type: Union[str, _models.BlockListType] = "committed", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.BlockList: + # pylint: disable=line-too-long """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param list_type: Specifies whether to return the list of committed blocks, the list of - uncommitted blocks, or both lists together. + uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and + "all". Default value is "committed". 
:type list_type: str or ~azure.storage.blob.models.BlockListType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlockList, or the result of cls(response) + :return: BlockList or the result of cls(response) :rtype: ~azure.storage.blob.models.BlockList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlockList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + cls: ClsType[_models.BlockList] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - accept = "application/xml" - # Construct URL - url = self.get_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - query_parameters['blocklisttype'] = self._serialize.query("list_type", list_type, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = 
self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_block_list_request( + url=self._config.url, + snapshot=snapshot, + list_type=list_type, + timeout=timeout, + lease_id=_lease_id, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlockList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlockList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore diff --git 
a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_container_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_container_operations.py index 41a1c8aa2daf..e68bf2cdb49a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_container_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_container_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,60 +7,921 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Iterator, List, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_create_request( + url: str, + *, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + request_id_parameter: Optional[str] = None, + default_encryption_scope: Optional[str] = None, + prevent_encryption_scope_override: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if access is not None: + _headers["x-ms-blob-public-access"] = _SERIALIZER.header("access", access, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if default_encryption_scope is not None: + _headers["x-ms-default-encryption-scope"] = _SERIALIZER.header( + "default_encryption_scope", default_encryption_scope, "str" + ) + if prevent_encryption_scope_override is not None: + _headers["x-ms-deny-encryption-scope-override"] = _SERIALIZER.header( + "prevent_encryption_scope_override", prevent_encryption_scope_override, "bool" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_properties_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is 
not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_metadata_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + if_modified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + 
_params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_access_policy_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_access_policy_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) 
+ accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if access is not None: + _headers["x-ms-blob-public-access"] = _SERIALIZER.header("access", access, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_restore_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if deleted_container_name is not None: + _headers["x-ms-deleted-container-name"] = _SERIALIZER.header( + "deleted_container_name", deleted_container_name, "str" + ) + if deleted_container_version is not None: + _headers["x-ms-deleted-container-version"] = _SERIALIZER.header( + "deleted_container_version", deleted_container_version, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + 
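[reviewer note -- illustrative only, not part of the patch] For orientation: each build_*_request helper above is a pure function that assembles an azure.core.rest.HttpRequest (URL, query parameters, headers) without performing any network I/O; the ContainerOperations methods later in this file run the assembled request through the client pipeline. A minimal sketch of what one helper produces, using build_restore_request from this hunk -- the import path mirrors the vendored layout in this patch, and the account URL, container name, and deleted-container version are hypothetical values:

    # Illustrative sketch; the module path follows the vendored layout in this patch.
    from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated.operations._container_operations import build_restore_request

    # Assemble the "undelete container" request; no network call happens here.
    request = build_restore_request(
        url="https://myaccount.blob.core.windows.net/my-container",  # hypothetical
        deleted_container_name="my-container",                       # hypothetical
        deleted_container_version="01D60F8BB59A4652",                # hypothetical
    )

    assert request.method == "PUT"
    print(request.url)                       # ...?restype=container&comp=undelete
    print(request.headers["x-ms-version"])   # "2025-01-05" by default

The operation methods then only need self._client._pipeline.run(request, stream=False) plus error mapping, which is exactly the shape of the rewritten methods elsewhere in this patch.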
+def build_rename_request( + url: str, + *, + source_container_name: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + source_lease_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["x-ms-source-container-name"] = _SERIALIZER.header("source_container_name", source_container_name, "str") + if source_lease_id is not None: + _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_submit_batch_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if multipart_content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("multipart_content_type", multipart_content_type, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = 
_SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_filter_blobs_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if where is not None: + _params["where"] = _SERIALIZER.query("where", where, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_acquire_lease_request( + url: str, + *, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = 
_SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if duration is not None: + _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_release_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_renew_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + 
if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_break_lease_request( + url: str, + *, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if 
break_period is not None: + _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_change_lease_request( + url: str, + *, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_blob_flat_segment_request( + url: str, + *, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long + url: str, + *, + delimiter: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + _params["delimiter"] = _SERIALIZER.query("delimiter", delimiter, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = 
_SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_account_info_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class ContainerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ContainerOperations(object): - """ContainerOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`container` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def create( + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] - request_id_parameter=None, # type: Optional[str] - container_cpk_scope_info=None, # type: Optional["_models.ContainerCpkScopeInfo"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + request_id_parameter: Optional[str] = None, + container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """creates a new container under the specified account. If the container with the same name already exists, the operation fails. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -67,209 +929,227 @@ def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. :type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_cpk_scope_info: Parameter group. + :param container_cpk_scope_info: Parameter group. Default value is None. 
:type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _default_encryption_scope = None _prevent_encryption_scope_override = None if container_cpk_scope_info is not None: _default_encryption_scope = container_cpk_scope_info.default_encryption_scope _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override - restype = "container" - accept = "application/xml" - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _default_encryption_scope is not None: - header_parameters['x-ms-default-encryption-scope'] = self._serialize.header("default_encryption_scope", _default_encryption_scope, 'str') - if _prevent_encryption_scope_override is not None: - header_parameters['x-ms-deny-encryption-scope-override'] = self._serialize.header("prevent_encryption_scope_override", _prevent_encryption_scope_override, 'bool') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_create_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + access=access, + request_id_parameter=request_id_parameter, + default_encryption_scope=_default_encryption_scope, + prevent_encryption_scope_override=_prevent_encryption_scope_override, + restype=restype, + 
version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - create.metadata = {'url': '/{containerName}'} # type: ignore - - def get_properties( + @distributed_trace + def get_properties( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - accept = "application/xml" - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) - response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) - response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) - response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["x-ms-has-immutability-policy"] = self._deserialize( + "bool", response.headers.get("x-ms-has-immutability-policy") + ) + response_headers["x-ms-has-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-has-legal-hold")) + response_headers["x-ms-default-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-default-encryption-scope") + ) + response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( + "bool", response.headers.get("x-ms-deny-encryption-scope-override") + ) + response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( + "bool", response.headers.get("x-ms-immutable-storage-with-versioning-enabled") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - 
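For illustration, a minimal sketch of how the optional ``cls`` callback surfaces these typed response headers from ``get_properties`` (the client construction mirrors the sketch in the class docstring above; the handler name and account URL are assumptions, and a real request additionally needs a credential):

    from azure.storage.blob._generated import AzureBlobStorage  # the vendored import path differs

    client = AzureBlobStorage(url="https://myaccount.blob.core.windows.net/mycontainer")

    def capture_headers(pipeline_response, deserialized, response_headers):
        # get_properties returns no body, so `deserialized` is None; header values
        # such as Last-Modified and x-ms-has-legal-hold arrive already converted
        # (datetime, bool, ...) by the deserialization shown above.
        return response_headers

    headers = client.container.get_properties(cls=capture_headers)
    print(headers["ETag"], headers["x-ms-lease-state"])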
get_properties.metadata = {'url': '/{containerName}'} # type: ignore - - def delete( + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """operation marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -278,70 +1158,61 @@ def delete( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - accept = "application/xml" - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_delete_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, 
stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_metadata( + @distributed_trace + def set_metadata( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """operation sets one or more user-defined name-value pairs for the specified container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -349,212 +1220,226 @@ def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since - restype = "container" - comp = "metadata" - accept = "application/xml" - - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_set_metadata_request( + url=self._config.url, + 
timeout=timeout, + lease_id=_lease_id, + metadata=metadata, + if_modified_since=_if_modified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_metadata.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def get_access_policy( self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> List["_models.SignedIdentifier"] + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> List[_models.SignedIdentifier]: + # pylint: disable=line-too-long """gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: list of SignedIdentifier, or the result of cls(response) + :return: list of SignedIdentifier or the result of cls(response) :rtype: list[~azure.storage.blob.models.SignedIdentifier] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - comp = "acl" - accept = "application/xml" - - # Construct URL - url = self.get_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_get_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in 
[200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - def set_access_policy( + @distributed_trace + def set_access_policy( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] - request_id_parameter=None, # type: Optional[str] - container_acl=None, # type: Optional[List["_models.SignedIdentifier"]] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + container_acl: Optional[List[_models.SignedIdentifier]] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """sets the permissions for the specified container. The permissions indicate whether blobs in a container may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. 
+ :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. :type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_acl: the acls for the container. - :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param container_acl: the acls for the container. Default value is None. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -563,920 +1448,1199 @@ def set_access_policy( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - comp = "acl" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if 
timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} if container_acl is not None: - body_content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + _content = self._serialize.body( + container_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt + ) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + access=access, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def restore( + @distributed_trace + def restore( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - deleted_container_name=None, # type: Optional[str] - deleted_container_version=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Restores a previously-deleted container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of - the deleted container to restore. + the deleted container to restore. Default value is None. :type deleted_container_name: str :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the - version of the deleted container to restore. + version of the deleted container to restore. Default value is None. 
:type deleted_container_version: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.restore.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if deleted_container_name is not None: - header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str') - if deleted_container_version is not None: - header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_restore_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def rename( # pylint: disable=inconsistent-return-statements + self, + source_container_name: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + source_lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """Renames an existing container. + + :param source_container_name: Required. Specifies the name of the container to rename. + :type source_container_name: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_lease_id: A lease ID for the source path. If specified, the source path must have + an active lease and the lease ID must match. Default value is None.
+ :type source_lease_id: str + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_rename_request( + url=self._config.url, + source_container_name=source_container_name, + timeout=timeout, + request_id_parameter=request_id_parameter, + source_lease_id=source_lease_id, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def submit_batch( + self, + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - restore.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - def acquire_lease( + @distributed_trace + def filter_blobs( self, - timeout=None, # type: Optional[int] - duration=None, # type: Optional[int] - proposed_lease_id=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long + """The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. 
Filter blobs searches within the given container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None.
+ :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. + duration cannot be changed using renew or change. Default value is None. 
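A hedged usage sketch for filter_blobs, as defined above; `ops` is assumed to be a generated ContainerOperations instance, and the quoting in `where` follows the service's tag-query syntax.

# Return blobs in this container whose tag env equals prod (assumed tag name/value).
segment = ops.filter_blobs(where="\"env\"='prod'", maxresults=100)
for item in segment.blobs:  # FilterBlobItem entries from the FilterBlobSegment model
    print(item.name, item.tags)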
:type duration: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Default value is None. :type proposed_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "acquire" - accept = "application/xml" - - # Construct URL - url = self.acquire_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if duration is not None: - header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') - if proposed_lease_id is not None: - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') 
- if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': '/{containerName}'} # type: ignore - - def release_lease( + @distributed_trace + def release_lease( # pylint: 
disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] 
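A sketch of the acquire/release lifecycle these two operations implement. acquire_lease returns None, but as its response handling above shows, the new lease id arrives in the x-ms-lease-id header, and a `cls` callback receives (pipeline_response, deserialized, response_headers), so it can capture that id. `ops` is again an assumed ContainerOperations instance.

captured = {}
ops.acquire_lease(
    duration=15,  # finite lease: 15 to 60 seconds, or -1 for infinite
    cls=lambda resp, body, headers: captured.update(headers),
)
lease_id = captured["x-ms-lease-id"]
ops.release_lease(lease_id=lease_id)  # releases the lock using the captured id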
- query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return 
cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def renew_lease( + @distributed_trace + def renew_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_renew_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + 
request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - renew_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def break_lease( + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - break_period=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + break_period: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
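A one-line sketch for renew_lease, which keeps the same lease id; request_id_parameter threads a caller-chosen correlation id into the analytics logs as x-ms-client-request-id, per the docstring. `ops` and `lease_id` carry over from the sketch above.

import uuid

ops.renew_lease(lease_id=lease_id, request_id_parameter=str(uuid.uuid4()))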
:type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a fixed- - duration lease breaks after the remaining lease period elapses, and an infinite lease breaks - immediately. + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. :type break_period: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "break" - accept = "application/xml" - - # Construct URL - url = self.break_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", 
timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if break_period is not None: - header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_break_lease_request( + url=self._config.url, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - break_lease.metadata = {'url': '/{containerName}'} # type: ignore - - def change_lease( + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - proposed_lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Required. :type proposed_lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
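A sketch of break_lease combined with the ModifiedAccessConditions parameter group: as the unpacking above shows, the group is flattened into If-Modified-Since / If-Unmodified-Since headers by the request builder. The import path is an assumption about the vendored layout.

import datetime

from azure.storage.blob._generated import models as _models  # assumed path

conditions = _models.ModifiedAccessConditions(
    if_unmodified_since=datetime.datetime(2024, 12, 1, tzinfo=datetime.timezone.utc),
)
# break_period=0 asks the service to break the lease immediately.
ops.break_lease(break_period=0, modified_access_conditions=conditions)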
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "change" - accept = "application/xml" - - # Construct URL - url = self.change_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_change_lease_request( + url=self._config.url, + 
lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - change_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def list_blob_flat_segment( self, - prefix=None, # type: Optional[str] - marker=None, # type: Optional[str] - maxresults=None, # type: Optional[int] - include=None, # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.ListBlobsFlatSegmentResponse" + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> _models.ListBlobsFlatSegmentResponse: + # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. 
+ specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify one or more datasets to include in the - response. + response. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
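A sketch for change_lease, defined above: the lease is rotated to a new caller-supplied GUID while the lock stays held. `ops` and `lease_id` carry over from the earlier lease sketches.

import uuid

new_lease_id = str(uuid.uuid4())
ops.change_lease(lease_id=lease_id, proposed_lease_id=new_lease_id)
lease_id = new_lease_id  # the old id stops working once the change succeeds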
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListBlobsFlatSegmentResponse, or the result of cls(response) + :return: ListBlobsFlatSegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsFlatSegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_blob_flat_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_blob_flat_segment_request( + url=self._config.url, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + 
_request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_blob_flat_segment.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore + @distributed_trace def list_blob_hierarchy_segment( self, - delimiter, # type: str - prefix=None, # type: Optional[str] - marker=None, # type: Optional[str] - maxresults=None, # type: Optional[int] - include=None, # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.ListBlobsHierarchySegmentResponse" + delimiter: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> _models.ListBlobsHierarchySegmentResponse: + # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose names begin with the same substring up to the appearance of the delimiter character. The delimiter may be a - single character or a string. + single character or a string. Required. :type delimiter: str :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. 
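A sketch of driving list_blob_flat_segment, above, through the NextMarker continuation protocol its docstring describes; attribute names follow the generated ListBlobsFlatSegmentResponse model, and `ops` is the assumed client.

marker = None
while True:
    page = ops.list_blob_flat_segment(prefix="logs/", marker=marker, maxresults=1000)
    for blob in page.segment.blob_items:  # BlobItemInternal entries
        print(blob.name)
    marker = page.next_marker
    if not marker:  # an empty NextMarker means the listing is complete
        break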
The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify one or more datasets to include in the - response. + response. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :return: ListBlobsHierarchySegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_blob_hierarchy_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - query_parameters['delimiter'] = self._serialize.query("delimiter", delimiter, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - 
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_blob_hierarchy_segment_request( + url=self._config.url, + delimiter=delimiter, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: - return 
cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_blob_hierarchy_segment.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - def get_account_info( - self, - **kwargs # type: Any - ): - # type: (...) -> None + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, 
query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_page_blob_operations.py index e7f8a0223351..96fd5d1c503f 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_page_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_page_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,75 +7,858 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
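# The regenerated module below moves URL, query, and header assembly out of the
# operation methods and into module-level `build_*_request` helpers that return
# `azure.core.rest.HttpRequest` objects; each operation method then only runs
# the request through the pipeline and deserializes the response. A minimal
# sketch of that helper pattern, assuming only `azure-core` is installed (the
# name `build_example_request` is illustrative, not part of this patch):

from typing import Any, Optional
from azure.core.rest import HttpRequest
from azure.core.utils import case_insensitive_dict

def build_example_request(url: str, *, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest:
    # Caller-supplied headers/params are folded in case-insensitively, exactly
    # as the generated helpers in this file do.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    if timeout is not None:
        _params["timeout"] = str(timeout)  # the real helpers serialize via a shared Serializer
    _headers["x-ms-version"] = "2025-01-05"  # service version pinned the same way in this file
    _headers["Accept"] = "application/xml"
    return HttpRequest(method="GET", url=url, params=_params, headers=_headers, **kwargs)

# e.g. request = build_example_request("https://myaccount.blob.core.windows.net/c/b", timeout=30)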
# -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_create_request( + url: str, + *, + content_length: int, + blob_content_length: int, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + blob_sequence_number: int = 0, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + _headers["Content-Length"] = 
_SERIALIZER.header("content_length", content_length, "int") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") + if blob_sequence_number is not None: + _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = 
_SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_upload_pages_request( + url: str, + *, + content_length: int, + content: IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = 
_SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_clear_pages_request( + url: str, + *, + content_length: int, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": 
_SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_upload_pages_from_url_request( + url: str, + *, + source_url: str, + source_range: str, + content_length: int, + range: str, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] 
= None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if source_contentcrc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( + "source_contentcrc64", source_contentcrc64, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + 
_headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_page_ranges_request( + url: str, + *, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + + # Construct headers + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, 
"str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_page_ranges_diff_request( + url: str, + *, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + prevsnapshot: Optional[str] = None, + prev_snapshot_url: Optional[str] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if prevsnapshot is not None: + _params["prevsnapshot"] = _SERIALIZER.query("prevsnapshot", prevsnapshot, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + + # Construct headers + if prev_snapshot_url is not None: + _headers["x-ms-previous-snapshot-url"] = _SERIALIZER.header("prev_snapshot_url", prev_snapshot_url, "str") + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = 
_SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resize_request( + url: str, + *, + blob_content_length: int, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, 
"str") + _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_sequence_number_request( + url: str, + *, + sequence_number_action: Union[str, _models.SequenceNumberActionType], + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + blob_sequence_number: int = 0, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-sequence-number-action"] = _SERIALIZER.header( + "sequence_number_action", sequence_number_action, "str" + ) + if blob_sequence_number is not None: + _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_copy_incremental_request( + url: str, + *, + copy_source: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: 
Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class PageBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PageBlobOperations(object): - """PageBlobOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`page_blob` attribute. 
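    A minimal usage sketch (illustrative only; it assumes the vendored
    generated client is importable as ``AzureBlobStorage`` from this
    package's ``_generated`` namespace, that ``blob_url`` is the full URL
    of a blob in an existing account, and that authentication is already
    configured on the pipeline)::

        client = AzureBlobStorage(url=blob_url)
        # Create an empty 512-byte page blob; content_length is 0 because
        # this request carries no body.
        client.page_blob.create(content_length=0, blob_content_length=512)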
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def create( + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - blob_content_length, # type: int - timeout=None, # type: Optional[int] - tier=None, # type: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - blob_sequence_number=0, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + blob_content_length: int, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + metadata: Optional[Dict[str, str]] = None, + blob_sequence_number: int = 0, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Create operation creates a new page blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. - :type blob_content_length: long + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :type blob_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param tier: Optional. Indicates the tier to be set on the page blob. + :param tier: Optional. Indicates the tier to be set on the page blob. Known values are: "P4", + "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None. :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier :param metadata: Optional. 
Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -82,42 +866,55 @@ def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. - :type blob_sequence_number: long + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
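         As a sketch of how a parameter group is passed (the ETag value is
         hypothetical, ``client`` is as in the class-level example, and in
         this vendored copy the model import path may differ from the
         ``azure.storage.blob`` reference below)::

             from azure.storage.blob.models import ModifiedAccessConditions

             conditions = ModifiedAccessConditions(if_match='"0x8D4BCC2E4835CD0"')
             client.page_blob.create(
                 content_length=0,
                 blob_content_length=512,
                 modified_access_conditions=conditions,
             )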
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -127,6 +924,7 @@ def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -134,184 +932,184 @@ def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "PageBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + blob_content_length=blob_content_length, + timeout=timeout, + tier=tier, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + 
blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - 
header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') - if blob_sequence_number is not None: - header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = 
self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def upload_pages( + @distributed_trace + def upload_pages( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - body, # type: IO - transactional_content_md5=None, # type: Optional[bytearray] - transactional_content_crc64=None, # type: Optional[bytearray] - timeout=None, # type: Optional[int] - range=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + body: IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Upload Pages operation writes a range of pages to a page blob. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. 
+ :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. - :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_sequence_number_less_than_or_equal_to = None _if_sequence_number_less_than = None @@ -321,169 +1119,169 @@ def upload_pages( _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_less_than_or_equal_to = ( + sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + ) if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if sequence_number_access_conditions is not None: - _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - comp = "page" - page_write = "update" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.upload_pages.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", 
self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_sequence_number_less_than_or_equal_to is not None: - header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') - if _if_sequence_number_less_than is not None: - header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') - if _if_sequence_number_equal_to is not None: - header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_upload_pages_request( + url=self._config.url, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + page_write=page_write, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", 
response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - upload_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def clear_pages( + @distributed_trace + def clear_pages( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - timeout=None, # type: Optional[int] - range=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Clear Pages operation clears a set of pages from a page blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. 
+ :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param sequence_number_access_conditions: Parameter group. - :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_sequence_number_less_than_or_equal_to = None _if_sequence_number_less_than = None @@ -493,177 +1291,174 @@ def clear_pages( _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = 
cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_less_than_or_equal_to = ( + sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + ) if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if sequence_number_access_conditions is not None: - _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to - comp = "page" - page_write = "clear" - accept = "application/xml" - - # Construct URL - url = self.clear_pages.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_sequence_number_less_than_or_equal_to is not None: - header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') - if _if_sequence_number_less_than is not None: - header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') - if _if_sequence_number_equal_to is not None: - header_parameters['x-ms-if-sequence-number-eq'] = 
self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_clear_pages_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + page_write=page_write, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', 
response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - clear_pages.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def upload_pages_from_url( + @distributed_trace + def upload_pages_from_url( # pylint: disable=inconsistent-return-statements self, - source_url, # type: str - source_range, # type: str - content_length, # type: int - range, # type: str - source_content_md5=None, # type: Optional[bytearray] - source_contentcrc64=None, # type: Optional[bytearray] - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + source_url: str, + source_range: str, + content_length: int, + range: str, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Upload Pages operation writes a range of pages to a page blob where the contents are read from a URL. - :param source_url: Specify a URL to the copy source. + :param source_url: Specify a URL to the copy source. Required. 
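
For the clear_pages operation above, the same range semantics apply, but no request body is sent, so content_length is 0. Again a sketch against the hypothetical `blob` client from the earlier examples.

    blob._client.page_blob.clear_pages(
        content_length=0,     # clear sends an empty request body
        range="bytes=0-511",  # 512-aligned range to reset to zeros
    )
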
:type source_url: str :param source_range: Bytes of source data in the specified range. The length of this range - should match the ContentLength header and x-ms-range/Range destination range header. + should match the ContentLength header and x-ms-range/Range destination range header. Required. :type source_range: str - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param range: The range of bytes to which the source range would be written. The range should - be 512 aligned and range-end is required. + be 512 aligned and range-end is required. Required. :type range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. Default value is None. + :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param sequence_number_access_conditions: Parameter group. - :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions - :param modified_access_conditions: Parameter group. + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. 
- :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _if_sequence_number_less_than_or_equal_to = None @@ -679,165 +1474,175 @@ def upload_pages_from_url( _source_if_match = None _source_if_none_match = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_less_than_or_equal_to = ( + sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + ) if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if sequence_number_access_conditions is not None: - _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to - _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than - _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = 
source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "page" - page_write = "update" - accept = "application/xml" - - # Construct URL - url = self.upload_pages_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_sequence_number_less_than_or_equal_to is not None: - header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long') - if _if_sequence_number_less_than is not None: - header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long') - if _if_sequence_number_equal_to is not None: - header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - 
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_upload_pages_from_url_request( + url=self._config.url, + source_url=source_url, + source_range=source_range, + content_length=content_length, + range=range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + page_write=page_write, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', 
response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - upload_pages_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def get_page_ranges( self, - snapshot=None, # type: Optional[str] - timeout=None, # type: Optional[int] - range=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> "_models.PageList" + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.PageList: + # pylint: disable=line-too-long """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a page blob. 
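
A sketch of the upload_pages_from_url operation shown above, where the service reads the data server-side from a source URL instead of a request body (hence content_length=0). The source URL and its SAS token are hypothetical; alternatively, the copy_source_authorization parameter added in this hunk accepts a Bearer OAuth token for the source.

    source = "https://srcaccount.blob.core.windows.net/src/source.pageblob?<sas>"  # hypothetical
    blob._client.page_blob.upload_pages_from_url(
        source_url=source,
        source_range="bytes=0-511",  # must match the destination range length
        content_length=0,            # no request body; data is read from source_url
        range="bytes=0-511",
    )
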
:param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PageList, or the result of cls(response) + :return: PageList or the result of cls(response) :rtype: ~azure.storage.blob.models.PageList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -847,133 +1652,149 @@ def get_page_ranges( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "pagelist" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_page_ranges_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_page_ranges.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 
'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('PageList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_page_ranges.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore + @distributed_trace def get_page_ranges_diff( self, - 
snapshot=None, # type: Optional[str] - timeout=None, # type: Optional[int] - prevsnapshot=None, # type: Optional[str] - prev_snapshot_url=None, # type: Optional[str] - range=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> "_models.PageList" + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + prevsnapshot: Optional[str] = None, + prev_snapshot_url: Optional[str] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.PageList: + # pylint: disable=line-too-long """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were changed between target blob and previous snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a DateTime value that specifies that the response will contain only pages that were changed between target blob and previous snapshot. Changed pages include both updated and cleared pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is the older of the two. Note that incremental snapshots are currently supported only for blobs - created on or after January 1, 2016. + created on or after January 1, 2016. Default value is None. :type prevsnapshot: str :param prev_snapshot_url: Optional. This header is only supported in service versions 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The response will only contain pages that were changed between the target blob and its previous - snapshot. + snapshot. Default value is None. :type prev_snapshot_url: str - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. 
The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PageList, or the result of cls(response) + :return: PageList or the result of cls(response) :rtype: ~azure.storage.blob.models.PageList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -983,259 +1804,249 @@ def get_page_ranges_diff( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "pagelist" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_page_ranges_diff_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + prevsnapshot=prevsnapshot, + prev_snapshot_url=prev_snapshot_url, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = 
self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_page_ranges_diff.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if prevsnapshot is not None: - query_parameters['prevsnapshot'] = self._serialize.query("prevsnapshot", prevsnapshot, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if prev_snapshot_url is not None: - header_parameters['x-ms-previous-snapshot-url'] = self._serialize.header("prev_snapshot_url", prev_snapshot_url, 'str') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - 
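    # The regenerated error path above maps 304 to ResourceNotModifiedError, so a
    # conditional diff request that misses now surfaces as a typed exception rather
    # than a bare status code. A caller-side sketch (page_blob, snapshot_time, etag,
    # and cached_diff are assumed names, not part of this patch):
    #
    #     from azure.core.exceptions import ResourceNotModifiedError
    #     try:
    #         diff = page_blob.get_page_ranges_diff(
    #             prevsnapshot=snapshot_time,
    #             modified_access_conditions=_models.ModifiedAccessConditions(if_none_match=etag),
    #         )
    #     except ResourceNotModifiedError:
    #         diff = cached_diff  # blob unchanged since the captured etag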
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('PageList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_page_ranges_diff.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - def resize( + @distributed_trace + def resize( # pylint: disable=inconsistent-return-statements self, - blob_content_length, # type: int - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + blob_content_length: int, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Resize the Blob. :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. - :type blob_content_length: long + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :type blob_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
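With this regeneration, resize no longer accepts encryption_algorithm as its own keyword; the algorithm travels with the rest of the customer-provided key inside CpkInfo. A minimal sketch, assuming a page_blob operations client and base64 key material in key and key_sha256:

    cpk = _models.CpkInfo(
        encryption_key=key,                # base64-encoded AES-256 key
        encryption_key_sha256=key_sha256,  # base64-encoded SHA-256 of the key
        encryption_algorithm="AES256",     # previously a standalone parameter defaulting to "AES256"
    )
    # The new size must remain aligned to a 512-byte page boundary.
    page_blob.resize(blob_content_length=4 * 1024 * 1024, cpk_info=cpk)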
:type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_resize_request( + url=self._config.url, + blob_content_length=blob_content_length, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + 
version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.resize.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - 
response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - resize.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def update_sequence_number( + @distributed_trace + def update_sequence_number( # pylint: disable=inconsistent-return-statements self, - sequence_number_action, # type: Union[str, "_models.SequenceNumberActionType"] - timeout=None, # type: Optional[int] - blob_sequence_number=0, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + sequence_number_action: Union[str, _models.SequenceNumberActionType], + timeout: Optional[int] = None, + blob_sequence_number: int = 0, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Update the sequence number of the blob. :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the request. This property applies to page blobs only. This property indicates how the service - should modify the blob's sequence number. + should modify the blob's sequence number. Known values are: "max", "update", and "increment". + Required. :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. - :type blob_sequence_number: long + and 2^63 - 1. Default value is 0. 
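The known action values are now documented inline ("max", "update", "increment") and correspond to SequenceNumberActionType. A short sketch, assuming a page_blob operations client:

    # "update" pins the sequence number to the supplied value; "max" only ever raises it;
    # per the service documentation, "increment" adds one and takes no explicit value.
    page_blob.update_sequence_number(
        sequence_number_action=_models.SequenceNumberActionType.UPDATE,
        blob_sequence_number=7,
    )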
+ :type blob_sequence_number: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -1245,81 +2056,69 @@ def update_sequence_number( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_update_sequence_number_request( + url=self._config.url, + sequence_number_action=sequence_number_action, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.update_sequence_number.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-sequence-number-action'] = self._serialize.header("sequence_number_action", sequence_number_action, 'str') - if blob_sequence_number is not None: - header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + 
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - update_sequence_number.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def copy_incremental( + @distributed_trace + def copy_incremental( # pylint: disable=inconsistent-return-statements self, - copy_source, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + copy_source: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between the previously copied snapshot are transferred to the destination. The copied snapshots are complete copies of @@ -1329,93 +2128,89 @@ def copy_incremental( :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "incrementalcopy" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_copy_incremental_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.copy_incremental.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) if cls: - return cls(pipeline_response, None, response_headers) - - copy_incremental.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_patch.py new file mode 100644 index 000000000000..71dde502c70f --- /dev/null +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_patch.py
@@ -0,0 +1,26 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+
+    from typing import List
+__all__ = []  # type: List[str]  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_service_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_service_operations.py
index 72f7a73fdf50..85a930712ca5 100644
--- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_service_operations.py
+++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_generated/operations/_service_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines,too-many-statements
 # coding=utf-8
 # --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
@@ -5,699 +6,1056 @@
 # Code generated by Microsoft (R) AutoRest Code Generator.
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+import sys
+from typing import Any, Callable, Dict, IO, Iterator, List, Literal, Optional, Type, TypeVar, Union
+
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    StreamClosedError,
+    StreamConsumedError,
+    map_error,
+)
 from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
 from ..
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_set_properties_request( + url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_properties_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 
"str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_statistics_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_containers_segment_request( + url: str, + *, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_user_delegation_key_request( + url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_account_info_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_submit_batch_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: 
Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if multipart_content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("multipart_content_type", multipart_content_type, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_filter_blobs_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if where is not None: + _params["where"] = _SERIALIZER.query("where", where, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + 
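The request builders above are free functions: given only keyword arguments they return a fully formed azure.core.rest.HttpRequest, which is what lets the ServiceOperations methods below shrink to serialize, build, run, deserialize. A minimal sketch of exercising one directly (the account URL and client request id are placeholders, not part of this patch):

    # Sketch only: build the service-properties GET request with no pipeline involved.
    request = build_get_properties_request(
        url="https://myaccount.blob.core.windows.net",  # placeholder endpoint
        timeout=30,
        request_id_parameter="example-client-id",       # placeholder trace id
    )
    assert request.method == "GET"
    # restype, comp and timeout were serialized into the query string:
    assert "restype=service" in request.url and "comp=properties" in request.url
    assert request.headers["x-ms-version"] == "2025-01-05"

Because the service version is popped from kwargs with a literal default, a caller can override x-ms-version per request without touching the client configuration.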
+ +class ServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ServiceOperations(object): - """ServiceOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`service` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def set_properties( + @distributed_trace + def set_properties( # pylint: disable=inconsistent-return-statements self, - storage_service_properties, # type: "_models.StorageServiceProperties" - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None + storage_service_properties: _models.StorageServiceProperties, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Sets properties for a storage account's Blob service endpoint, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - :param storage_service_properties: The StorageService properties. + :param storage_service_properties: The StorageService properties. Required. :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True) + + _request = build_set_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) 
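+        # Editorial note: the regenerated error path below first checks the
+        # documented success code (202 for Set Blob Service Properties), maps
+        # known statuses (401/404/409/304) through error_map, and then uses
+        # failsafe_deserialize, which returns None instead of raising when the
+        # StorageError XML body cannot be parsed, so HttpResponseError is
+        # always raised with the best available model.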
+ response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) - - set_properties.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def get_properties( - self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.StorageServiceProperties" + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceProperties: + # pylint: disable=line-too-long """gets the properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceProperties, or the result of cls(response) + :return: StorageServiceProperties or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceProperties - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) + + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_properties.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def get_statistics( - self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.StorageServiceStats" + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceStats: + # pylint: disable=line-too-long """Retrieves statistics related to replication for the Blob service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the storage account. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceStats, or the result of cls(response) + :return: StorageServiceStats or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceStats - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "stats" - accept = "application/xml" - - # Construct URL - url = self.get_statistics.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) + cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) + + _request = build_get_statistics_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('StorageServiceStats', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_statistics.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def list_containers_segment( self, - prefix=None, # type: Optional[str] - marker=None, # type: Optional[str] - maxresults=None, # type: Optional[int] - include=None, # type: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.ListContainersSegmentResponse" + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> _models.ListContainersSegmentResponse: + # pylint: disable=line-too-long """The List Containers Segment operation returns a list of the containers under the specified account. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify that the container's metadata be returned as - part of the response body. + part of the response body. Default value is None. 
:type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListContainersSegmentResponse, or the result of cls(response) + :return: ListContainersSegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_containers_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_containers_segment_request( + 
url=self._config.url, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_containers_segment.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def get_user_delegation_key( self, - key_info, # type: "_models.KeyInfo" - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.UserDelegationKey" + key_info: _models.KeyInfo, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> _models.UserDelegationKey: + # pylint: disable=line-too-long """Retrieves a user delegation key for the Blob service. This is only a valid operation when using bearer token authentication. - :param key_info: + :param key_info: Key information. Required. :type key_info: ~azure.storage.blob.models.KeyInfo :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: UserDelegationKey, or the result of cls(response) + :return: UserDelegationKey or the result of cls(response) :rtype: ~azure.storage.blob.models.UserDelegationKey - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "userdelegationkey" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.get_user_delegation_key.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) + + _content = self._serialize.body(key_info, "KeyInfo", is_xml=True) + + _request = build_get_user_delegation_key_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('UserDelegationKey', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("UserDelegationKey", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_user_delegation_key.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore - def get_account_info( - self, - **kwargs # type: Any - ): - # type: (...) -> None + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) - response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', 
response.headers.get('x-ms-is-hns-enabled')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def submit_batch( self, - content_length, # type: int - multipart_content_type, # type: str - body, # type: IO - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> IO + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. - :param content_length: The length of the request. - :type content_length: long - :param multipart_content_type: Required. The value of this header must be multipart/mixed with - a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. - :type multipart_content_type: str - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - comp = "batch" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.submit_batch.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(body, 'IO', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + 
_request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - submit_batch.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def filter_blobs( self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - where=None, # type: Optional[str] - marker=None, # type: Optional[str] - maxresults=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> "_models.FilterBlobSegment" + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be scoped within the expression to a single container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param where: Filters the results to return only to return only blobs whose tags match the - specified expression. + specified expression. Default value is None. :type where: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. 
The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: FilterBlobSegment, or the result of cls(response) + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) :rtype: ~azure.storage.blob.models.FilterBlobSegment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "blobs" - accept = "application/xml" - - # Construct URL - url = self.filter_blobs.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if where is not None: - query_parameters['where'] = self._serialize.query("where", where, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = 
kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - filter_blobs.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_lease.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_lease.py index d495d6e2dfb9..b8b5684d7c23 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_lease.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_lease.py @@ -3,60 +3,55 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only import uuid -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, TypeVar, TYPE_CHECKING -) +from typing import Any, Optional, Union, TYPE_CHECKING from azure.core.exceptions import HttpResponseError from azure.core.tracing.decorator import distributed_trace -from ._shared.response_handlers import return_response_headers, process_storage_error +from ._shared.response_handlers import process_storage_error, return_response_headers from ._serialize import get_modify_conditions if TYPE_CHECKING: + from azure.storage.blob import BlobClient, ContainerClient from datetime import datetime - BlobClient = TypeVar("BlobClient") - ContainerClient = TypeVar("ContainerClient") - -class BlobLeaseClient(object): +class BlobLeaseClient(): # pylint: disable=client-accepts-api-version-keyword """Creates a new BlobLeaseClient. This client provides lease operations on a BlobClient or ContainerClient. - - :ivar str id: - The ID of the lease currently being maintained. This will be `None` if no - lease has yet been acquired. - :ivar str etag: - The ETag of the lease currently being maintained. This will be `None` if no - lease has yet been acquired or modified. - :ivar ~datetime.datetime last_modified: - The last modified timestamp of the lease currently being maintained. - This will be `None` if no lease has yet been acquired or modified. - - :param client: - The client of the blob or container to lease. - :type client: ~azure.storage.blob.BlobClient or - ~azure.storage.blob.ContainerClient - :param str lease_id: - A string representing the lease ID of an existing lease. This value does not - need to be specified in order to acquire a new lease, or break one. + :param client: The client of the blob or container to lease. + :type client: Union[BlobClient, ContainerClient] + :param lease_id: A string representing the lease ID of an existing lease. This value does not need to be + specified in order to acquire a new lease, or break one. + :type lease_id: Optional[str] """ - def __init__( - self, client, lease_id=None - ): # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs - # type: (Union[BlobClient, ContainerClient], Optional[str]) -> None + + id: str + """The ID of the lease currently being maintained. This will be `None` if no + lease has yet been acquired.""" + etag: Optional[str] + """The ETag of the lease currently being maintained. This will be `None` if no + lease has yet been acquired or modified.""" + last_modified: Optional["datetime"] + """The last modified timestamp of the lease currently being maintained. 
+ This will be `None` if no lease has yet been acquired or modified.""" + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs + self, client: Union["BlobClient", "ContainerClient"], + lease_id: Optional[str] = None + ) -> None: self.id = lease_id or str(uuid.uuid4()) self.last_modified = None self.etag = None if hasattr(client, 'blob_name'): - self._client = client._client.blob # type: ignore # pylint: disable=protected-access + self._client = client._client.blob elif hasattr(client, 'container_name'): - self._client = client._client.container # type: ignore # pylint: disable=protected-access + self._client = client._client.container else: raise TypeError("Lease must use either BlobClient or ContainerClient.") @@ -67,8 +62,7 @@ def __exit__(self, *args): self.release() @distributed_trace - def acquire(self, lease_duration=-1, **kwargs): - # type: (int, **Any) -> None + def acquire(self, lease_duration: int = -1, **kwargs: Any) -> None: """Requests a new lease. If the container does not have an active lease, the Blob service creates a @@ -103,12 +97,16 @@ def acquire(self, lease_duration=-1, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.acquire_lease( + response: Any = self._client.acquire_lease( timeout=kwargs.pop('timeout', None), duration=lease_duration, proposed_lease_id=self.id, @@ -117,13 +115,12 @@ def acquire(self, lease_duration=-1, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime - self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') + self.etag = response.get('etag') @distributed_trace - def renew(self, **kwargs): - # type: (Any) -> None + def renew(self, **kwargs: Any) -> None: """Renews the lease. The lease can be renewed if the lease ID specified in the @@ -156,12 +153,16 @@ def renew(self, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.renew_lease( + response: Any = self._client.renew_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -169,13 +170,12 @@ def renew(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace - def release(self, **kwargs): - # type: (Any) -> None + def release(self, **kwargs: Any) -> None: """Release the lease. The lease may be released if the client lease id specified matches @@ -206,12 +206,16 @@ def release(self, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.release_lease( + response: Any = self._client.release_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -219,13 +223,12 @@ def release(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace - def change(self, proposed_lease_id, **kwargs): - # type: (str, Any) -> None + def change(self, proposed_lease_id: str, **kwargs: Any) -> None: """Change the lease ID of an active lease. :param str proposed_lease_id: @@ -255,12 +258,16 @@ def change(self, proposed_lease_id, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
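Editorially, the acquire/renew/release/change methods documented in this hunk make up the full lease lifecycle exposed by BlobLeaseClient. The following is a minimal sketch of how they compose through the public client API, assuming an existing blob; the connection string, container, and blob names are placeholders and not part of the patch:

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="my-container", blob_name="my-blob")

lease = blob.acquire_lease(lease_duration=15)  # returns a BlobLeaseClient
try:
    # While the lease is held, mutating calls must present the lease ID.
    blob.set_blob_metadata({"owner": "worker-1"}, lease=lease)
    lease.renew()  # restart the duration clock before it expires
finally:
    lease.release()  # make the blob leasable again

Since the __exit__ shown in the hunk above calls release(), the try/finally could equally be written as a with block around the acquired lease.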
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.change_lease( + response: Any = self._client.change_lease( lease_id=self.id, proposed_lease_id=proposed_lease_id, timeout=kwargs.pop('timeout', None), @@ -269,13 +276,12 @@ def change(self, proposed_lease_id, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace - def break_lease(self, lease_break_period=None, **kwargs): - # type: (Optional[int], Any) -> int + def break_lease(self, lease_break_period: Optional[int] = None, **kwargs: Any) -> int: """Break the lease, if the container or blob has an active lease. Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; @@ -314,7 +320,11 @@ def break_lease(self, lease_break_period=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: Approximate time remaining in the lease period, in seconds. :rtype: int """ diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_list_blobs_helper.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_list_blobs_helper.py index 309d37bd9583..5e357cea4fb4 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_list_blobs_helper.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_list_blobs_helper.py @@ -1,57 +1,74 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information.
# -------------------------------------------------------------------------- -from azure.core.paging import PageIterator, ItemPaged +from typing import Any, Callable, cast, List, Optional, Tuple, Union +from urllib.parse import unquote + from azure.core.exceptions import HttpResponseError -from ._deserialize import get_blob_properties_from_generated_code, parse_tags +from azure.core.paging import ItemPaged, PageIterator + +from ._deserialize import ( + get_blob_properties_from_generated_code, + load_many_xml_nodes, + load_xml_int, + load_xml_string, + parse_tags +) from ._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, FilterBlobItem +from ._generated._serialization import Deserializer from ._models import BlobProperties, FilteredBlob from ._shared.models import DictMixin -from ._shared.response_handlers import return_context_and_deserialized, process_storage_error +from ._shared.response_handlers import ( + process_storage_error, + return_context_and_deserialized, + return_raw_deserialized +) + + +class IgnoreListBlobsDeserializer(Deserializer): + def __call__(self, target_obj, response_data, content_type=None): # pylint: disable=inconsistent-return-statements + if target_obj == "ListBlobsFlatSegmentResponse": + return None + super().__call__(target_obj, response_data, content_type) class BlobPropertiesPaged(PageIterator): - """An Iterable of Blob properties. + """An Iterable of Blob properties.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - - :param callable command: Function to retrieve the next page of items. - :param str container: The name of the container. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str continuation_token: An opaque continuation token. 
- :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ def __init__( - self, command, - container=None, - prefix=None, - results_per_page=None, - continuation_token=None, - delimiter=None, - location_mode=None): + self, command: Callable, + container: str, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + delimiter: Optional[str] = None, + location_mode: Optional[str] = None, + ) -> None: super(BlobPropertiesPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -79,7 +96,7 @@ def _get_next_cb(self, continuation_token): process_storage_error(error) def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return + self.location_mode, self._response = cast(Tuple[Optional[str], Any], get_next_return) self.service_endpoint = self._response.service_endpoint self.prefix = self._response.prefix self.marker = self._response.marker @@ -89,16 +106,89 @@ def _extract_data_cb(self, get_next_return): return self._response.next_marker or None, self.current_page - def _build_item(self, item): + def _build_item(self, item: Union[BlobItemInternal, BlobProperties]) -> BlobProperties: if isinstance(item, BlobProperties): return item if isinstance(item, BlobItemInternal): - blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access - blob.container = self.container + blob = get_blob_properties_from_generated_code(item) + blob.container = self.container # type: ignore [assignment] return blob return item +class BlobNamesPaged(PageIterator): + """An Iterable of Blob names.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of blobs to retrieve per call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. 
The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + + def __init__( + self, command: Callable, + container: Optional[str] = None, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: + super(BlobNamesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.container = container + self.current_page = None + self.location_mode = location_mode + + def _get_next_cb(self, continuation_token): + try: + return self._command( + prefix=self.prefix, + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_raw_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.get('ServiceEndpoint') + self.prefix = load_xml_string(self._response, 'Prefix') + self.marker = load_xml_string(self._response, 'Marker') + self.results_per_page = load_xml_int(self._response, 'MaxResults') + self.container = self._response.get('ContainerName') + + blobs = load_many_xml_nodes(self._response, 'Blob', wrapper='Blobs') + self.current_page = [load_xml_string(blob, 'Name') for blob in blobs] + + next_marker = load_xml_string(self._response, 'NextMarker') + return next_marker or None, self.current_page + + class BlobPrefixPaged(BlobPropertiesPaged): def __init__(self, *args, **kwargs): super(BlobPrefixPaged, self).__init__(*args, **kwargs) @@ -115,10 +205,14 @@ def _extract_data_cb(self, get_next_return): def _build_item(self, item): item = super(BlobPrefixPaged, self)._build_item(item) if isinstance(item, GenBlobPrefix): + if item.name.encoded: + name = unquote(item.name.content) + else: + name = item.name.content return BlobPrefix( self._command, container=self.container, - prefix=item.name, + prefix=name, results_per_page=self.results_per_page, location_mode=self.location_mode) return item @@ -128,74 +222,72 @@ class BlobPrefix(ItemPaged, DictMixin): """An Iterable of Blob properties. Returned from walk_blobs when a delimiter is used. - Can be thought of as a virtual blob directory. - - :ivar str name: The prefix, or "directory name" of the blob. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str next_marker: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. 
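As context for the pagers in this file: BlobPropertiesPaged and BlobNamesPaged both follow azure.core's PageIterator contract, where a get_next callback fetches the raw page for a continuation token and an extract_data callback returns a (next_token, items) tuple, with a None token ending iteration. A self-contained sketch of that contract over plain in-memory data, purely illustrative and not part of this patch:

from typing import List
from azure.core.paging import PageIterator

class NumbersPaged(PageIterator):
    """Pages a list in fixed-size chunks via the get_next/extract_data contract."""

    def __init__(self, data: List[int], page_size: int = 2) -> None:
        super().__init__(get_next=self._get_next_cb, extract_data=self._extract_data_cb)
        self._data = data
        self._page_size = page_size

    def _get_next_cb(self, continuation_token):
        # The token plays the role of the service's NextMarker.
        start = int(continuation_token or 0)
        return start, self._data[start:start + self._page_size]

    def _extract_data_cb(self, get_next_return):
        start, page = get_next_return
        next_start = start + self._page_size
        token = str(next_start) if next_start < len(self._data) else None
        return token, page  # (continuation token, current page of items)

for page in NumbersPaged([1, 2, 3, 4, 5]):
    print(list(page))  # [1, 2], then [3, 4], then [5]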
- :vartype current_page: list(~azure.storage.blob.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str marker: An opaque continuation token. - :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ - def __init__(self, *args, **kwargs): + Can be thought of as a virtual blob directory.""" + + name: str + """The prefix, or "directory name" of the blob.""" + service_endpoint: Optional[str] + """The service URL.""" + prefix: str + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + next_marker: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: str + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + delimiter: str + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + container: str + """The name of the container.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) - self.name = kwargs.get('prefix') - self.prefix = kwargs.get('prefix') + self.name = kwargs.get('prefix') # type: ignore [assignment] + self.prefix = kwargs.get('prefix') # type: ignore [assignment] self.results_per_page = kwargs.get('results_per_page') - self.container = kwargs.get('container') - self.delimiter = kwargs.get('delimiter') - self.location_mode = kwargs.get('location_mode') + self.container = kwargs.get('container') # type: ignore [assignment] + self.delimiter = kwargs.get('delimiter') # type: ignore [assignment] + self.location_mode = kwargs.get('location_mode') # type: ignore [assignment] class FilteredBlobPaged(PageIterator): - """An Iterable of Blob properties. + """An Iterable of Blob properties.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. 
The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + command: Callable + """Function to retrieve the next page of items.""" + container: Optional[str] + """The name of the container.""" - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.FilteredBlob) - :ivar str container: The container that the blobs are listed from. - - :param callable command: Function to retrieve the next page of items. - :param str container: The name of the container. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str continuation_token: An opaque continuation token. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ def __init__( - self, command, - container=None, - results_per_page=None, - continuation_token=None, - location_mode=None): + self, command: Callable, + container: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: super(FilteredBlobPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_models.py index 1a8237cfea14..fbde3a808e1e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_models.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_models.py @@ -7,49 +7,91 @@ # pylint: disable=super-init-not-called, too-many-lines from enum import Enum +from typing import Any, Callable, Dict, List, Optional, Union, TYPE_CHECKING +from azure.core import CaseInsensitiveEnumMeta from azure.core.paging import PageIterator from azure.core.exceptions import HttpResponseError -from ._generated.models import ArrowField -from ._shared import decode_base64_to_text +from ._shared import decode_base64_to_bytes from ._shared.response_handlers import return_context_and_deserialized, process_storage_error from ._shared.models import DictMixin, get_enum_value +from ._generated.models import AccessPolicy as GenAccessPolicy +from ._generated.models import ArrowField +from ._generated.models import CorsRule as GeneratedCorsRule from ._generated.models import Logging as GeneratedLogging from ._generated.models import Metrics as GeneratedMetrics from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy from ._generated.models import StaticWebsite as GeneratedStaticWebsite -from ._generated.models import 
CorsRule as GeneratedCorsRule -from ._generated.models import AccessPolicy as GenAccessPolicy +if TYPE_CHECKING: + from datetime import datetime + from ._generated.models import PageList -class BlobType(str, Enum): +# Parse a generated PageList into a single list of PageRange sorted by start. +def parse_page_list(page_list: "PageList") -> List["PageRange"]: - BlockBlob = "BlockBlob" - PageBlob = "PageBlob" - AppendBlob = "AppendBlob" + page_ranges = page_list.page_range + clear_ranges = page_list.clear_range + if page_ranges is None: + raise ValueError("PageList's 'page_range' is malformed or None.") + if clear_ranges is None: + raise ValueError("PageList's 'clear_ranges' is malformed or None.") -class BlockState(str, Enum): + ranges = [] + p_i, c_i = 0, 0 + + # Combine page ranges and clear ranges into single list, sorted by start + while p_i < len(page_ranges) and c_i < len(clear_ranges): + p, c = page_ranges[p_i], clear_ranges[c_i] + + if p.start < c.start: + ranges.append( + PageRange(start=p.start, end=p.end, cleared=False) + ) + p_i += 1 + else: + ranges.append( + PageRange(start=c.start, end=c.end, cleared=True) + ) + c_i += 1 + + # Grab remaining elements in either list + ranges += [PageRange(start=r.start, end=r.end, cleared=False) for r in page_ranges[p_i:]] + ranges += [PageRange(start=r.start, end=r.end, cleared=True) for r in clear_ranges[c_i:]] + + return ranges + + +class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + BLOCKBLOB = "BlockBlob" + PAGEBLOB = "PageBlob" + APPENDBLOB = "AppendBlob" + + +class BlockState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Block blob block types.""" - Committed = 'Committed' #: Committed blocks. - Latest = 'Latest' #: Latest blocks. - Uncommitted = 'Uncommitted' #: Uncommitted blocks. + COMMITTED = 'Committed' #: Committed blocks. + LATEST = 'Latest' #: Latest blocks. + UNCOMMITTED = 'Uncommitted' #: Uncommitted blocks. -class StandardBlobTier(str, Enum): +class StandardBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ Specifies the blob tier to set the blob to. This is only applicable for block blobs on standard storage accounts. """ - Archive = 'Archive' #: Archive - Cool = 'Cool' #: Cool - Hot = 'Hot' #: Hot + ARCHIVE = 'Archive' #: Archive + COOL = 'Cool' #: Cool + COLD = 'Cold' #: Cold + HOT = 'Hot' #: Hot -class PremiumPageBlobTier(str, Enum): +class PremiumPageBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. Please take a look at: @@ -60,6 +102,7 @@ class PremiumPageBlobTier(str, Enum): P4 = 'P4' #: P4 Tier P6 = 'P6' #: P6 Tier P10 = 'P10' #: P10 Tier + P15 = 'P15' #: P15 Tier P20 = 'P20' #: P20 Tier P30 = 'P30' #: P30 Tier P40 = 'P40' #: P40 Tier @@ -67,26 +110,34 @@ class PremiumPageBlobTier(str, Enum): P60 = 'P60' #: P60 Tier -class SequenceNumberAction(str, Enum): +class QuickQueryDialect(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the quick query input/output dialect.""" + + DELIMITEDTEXT = 'DelimitedTextDialect' + DELIMITEDJSON = 'DelimitedJsonDialect' + PARQUET = 'ParquetDialect' + + +class SequenceNumberAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Sequence number actions.""" - Increment = 'increment' + INCREMENT = 'increment' """ Increments the value of the sequence number by 1. If specifying this option, do not include the x-ms-blob-sequence-number header. 
""" - Max = 'max' + MAX = 'max' """ Sets the sequence number to be the higher of the value included with the request and the value currently stored for the blob. """ - Update = 'update' + UPDATE = 'update' """Sets the sequence number to the value included with the request.""" -class PublicAccess(str, Enum): +class PublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ Specifies whether data in the container may be accessed publicly and the level of access. """ @@ -97,14 +148,14 @@ class PublicAccess(str, Enum): Clients cannot enumerate the containers within the storage account as well as the blobs within the container. """ - Blob = 'blob' + BLOB = 'blob' """ Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. """ - Container = 'container' + CONTAINER = 'container' """ Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers @@ -112,6 +163,48 @@ class PublicAccess(str, Enum): """ +class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ + Specifies the immutability policy mode to set on the blob. + "Mutable" can only be returned by service, don't set to "Mutable". + """ + + UNLOCKED = "Unlocked" + LOCKED = "Locked" + MUTABLE = "Mutable" + + +class RetentionPolicy(GeneratedRetentionPolicy): + """The retention policy which determines how long the associated data should + persist. + + :param bool enabled: + Indicates whether a retention policy is enabled for the storage service. + The default value is False. + :param Optional[int] days: + Indicates the number of days that metrics or logging or + soft-deleted data should be retained. All data older than this value will + be deleted. If enabled=True, the number of days must be specified. + """ + + enabled: bool = False + days: Optional[int] = None + + def __init__(self, enabled: bool = False, days: Optional[int] = None) -> None: + super(RetentionPolicy, self).__init__(enabled=enabled, days=days, allow_permanent_delete=None) + if self.enabled and (self.days is None): + raise ValueError("If policy is enabled, 'days' must be specified.") + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + enabled=generated.enabled, + days=generated.days, + ) + + class BlobAnalyticsLogging(GeneratedLogging): """Azure Analytics Logging settings. @@ -128,8 +221,19 @@ class BlobAnalyticsLogging(GeneratedLogging): policy will be disabled by default. """ - def __init__(self, **kwargs): - self.version = kwargs.get('version', u'1.0') + version: str = '1.0' + """The version of Storage Analytics to configure.""" + delete: bool = False + """Indicates whether all delete requests should be logged.""" + read: bool = False + """Indicates whether all read requests should be logged.""" + write: bool = False + """Indicates whether all write requests should be logged.""" + retention_policy: RetentionPolicy = RetentionPolicy() + """Determines how long the associated data should persist.""" + + def __init__(self, **kwargs: Any) -> None: + self.version = kwargs.get('version', '1.0') self.delete = kwargs.get('delete', False) self.read = kwargs.get('read', False) self.write = kwargs.get('write', False) @@ -164,8 +268,17 @@ class Metrics(GeneratedMetrics): policy will be disabled by default. 
""" - def __init__(self, **kwargs): - self.version = kwargs.get('version', u'1.0') + version: str = '1.0' + """The version of Storage Analytics to configure.""" + enabled: bool = False + """Indicates whether metrics are enabled for the Blob service.""" + include_apis: Optional[bool] + """Indicates whether metrics should generate summary statistics for called API operations.""" + retention_policy: RetentionPolicy = RetentionPolicy() + """Determines how long the associated data should persist.""" + + def __init__(self, **kwargs: Any) -> None: + self.version = kwargs.get('version', '1.0') self.enabled = kwargs.get('enabled', False) self.include_apis = kwargs.get('include_apis') self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() @@ -182,34 +295,6 @@ def _from_generated(cls, generated): ) -class RetentionPolicy(GeneratedRetentionPolicy): - """The retention policy which determines how long the associated data should - persist. - - :param bool enabled: - Indicates whether a retention policy is enabled for the storage service. - The default value is False. - :param int days: - Indicates the number of days that metrics or logging or - soft-deleted data should be retained. All data older than this value will - be deleted. If enabled=True, the number of days must be specified. - """ - - def __init__(self, enabled=False, days=None): - super(RetentionPolicy, self).__init__(enabled=enabled, days=days, allow_permanent_delete=None) - if self.enabled and (self.days is None): - raise ValueError("If policy is enabled, 'days' must be specified.") - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - enabled=generated.enabled, - days=generated.days, - ) - - class StaticWebsite(GeneratedStaticWebsite): """The properties that enable an account to host a static website. @@ -224,7 +309,16 @@ class StaticWebsite(GeneratedStaticWebsite): Absolute path of the default index page. """ - def __init__(self, **kwargs): + enabled: bool = False + """Indicates whether this account is hosting a static website.""" + index_document: Optional[str] + """The default name of the index page under each directory.""" + error_document404_path: Optional[str] + """The absolute path of the custom 404 page.""" + default_index_document_path: Optional[str] + """Absolute path of the default index page.""" + + def __init__(self, **kwargs: Any) -> None: self.enabled = kwargs.get('enabled', False) if self.enabled: self.index_document = kwargs.get('index_document') @@ -275,13 +369,45 @@ class CorsRule(GeneratedCorsRule): preflight response. 
""" - def __init__(self, allowed_origins, allowed_methods, **kwargs): + allowed_origins: str + """The comma-delimited string representation of the list of origin domains that will be allowed via + CORS, or "*" to allow all domains.""" + allowed_methods: str + """The comma-delimited string representation of the list HTTP methods that are allowed to be executed + by the origin.""" + exposed_headers: str + """The comma-delimited string representation of the list of response headers to expose to CORS clients.""" + allowed_headers: str + """The comma-delimited string representation of the list of headers allowed to be part of the cross-origin + request.""" + max_age_in_seconds: int + """The number of seconds that the client/browser should cache a pre-flight response.""" + + def __init__(self, allowed_origins: List[str], allowed_methods: List[str], **kwargs: Any) -> None: self.allowed_origins = ','.join(allowed_origins) self.allowed_methods = ','.join(allowed_methods) self.allowed_headers = ','.join(kwargs.get('allowed_headers', [])) self.exposed_headers = ','.join(kwargs.get('exposed_headers', [])) self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0) + @staticmethod + def _to_generated(rules: Optional[List["CorsRule"]]) -> Optional[List[GeneratedCorsRule]]: + if rules is None: + return rules + + generated_cors_list = [] + for cors_rule in rules: + generated_cors = GeneratedCorsRule( + allowed_origins=cors_rule.allowed_origins, + allowed_methods=cors_rule.allowed_methods, + allowed_headers=cors_rule.allowed_headers, + exposed_headers=cors_rule.exposed_headers, + max_age_in_seconds=cors_rule.max_age_in_seconds + ) + generated_cors_list.append(generated_cors) + + return generated_cors_list + @classmethod def _from_generated(cls, generated): return cls( @@ -298,39 +424,46 @@ class ContainerProperties(DictMixin): Returned ``ContainerProperties`` instances expose these values through a dictionary interface, for example: ``container_props["last_modified"]``. - Additionally, the container name is available as ``container_props["name"]``. - - :ivar ~datetime.datetime last_modified: - A datetime object representing the last time the container was modified. - :ivar str etag: - The ETag contains a value that you can use to perform operations - conditionally. - :ivar ~azure.storage.blob.LeaseProperties lease: - Stores all the lease information for the container. - :ivar str public_access: Specifies whether data in the container may be accessed - publicly and the level of access. - :ivar bool has_immutability_policy: - Represents whether the container has an immutability policy. - :ivar bool has_legal_hold: - Represents whether the container has a legal hold. - :ivar dict metadata: A dict with name-value pairs to associate with the - container as metadata. - :ivar ~azure.storage.blob.ContainerEncryptionScope encryption_scope: - The default encryption scope configuration for the container. 
- """ - - def __init__(self, **kwargs): - self.name = None - self.last_modified = kwargs.get('Last-Modified') - self.etag = kwargs.get('ETag') + Additionally, the container name is available as ``container_props["name"]``.""" + + name: str + """Name of the container.""" + last_modified: "datetime" + """A datetime object representing the last time the container was modified.""" + etag: str + """The ETag contains a value that you can use to perform operations conditionally.""" + lease: "LeaseProperties" + """Stores all the lease information for the container.""" + public_access: Optional[str] + """Specifies whether data in the container may be accessed publicly and the level of access.""" + has_immutability_policy: bool + """Represents whether the container has an immutability policy.""" + has_legal_hold: bool + """Represents whether the container has a legal hold.""" + immutable_storage_with_versioning_enabled: bool + """Represents whether immutable storage with versioning enabled on the container.""" + metadata: Dict[str, Any] + """A dict with name-value pairs to associate with the container as metadata.""" + encryption_scope: Optional["ContainerEncryptionScope"] + """The default encryption scope configuration for the container.""" + deleted: Optional[bool] + """Whether this container was deleted.""" + version: Optional[str] + """The version of a deleted container.""" + + def __init__(self, **kwargs: Any) -> None: + self.name = None # type: ignore [assignment] + self.last_modified = kwargs.get('Last-Modified') # type: ignore [assignment] + self.etag = kwargs.get('ETag') # type: ignore [assignment] self.lease = LeaseProperties(**kwargs) self.public_access = kwargs.get('x-ms-blob-public-access') - self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy') + self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy') # type: ignore [assignment] self.deleted = None self.version = None - self.has_legal_hold = kwargs.get('x-ms-has-legal-hold') - self.metadata = kwargs.get('metadata') + self.has_legal_hold = kwargs.get('x-ms-has-legal-hold') # type: ignore [assignment] + self.metadata = kwargs.get('metadata') # type: ignore [assignment] self.encryption_scope = None + self.immutable_storage_with_versioning_enabled = kwargs.get('x-ms-immutable-storage-with-versioning-enabled') # type: ignore [assignment] # pylint: disable=name-too-long default_encryption_scope = kwargs.get('x-ms-default-encryption-scope') if default_encryption_scope: self.encryption_scope = ContainerEncryptionScope( @@ -347,6 +480,7 @@ def _from_generated(cls, generated): props.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access props.public_access = generated.properties.public_access props.has_immutability_policy = generated.properties.has_immutability_policy + props.immutable_storage_with_versioning_enabled = generated.properties.is_immutable_storage_with_versioning_enabled # pylint: disable=line-too-long, name-too-long props.deleted = generated.deleted props.version = generated.version props.has_legal_hold = generated.properties.has_legal_hold @@ -358,24 +492,34 @@ def _from_generated(cls, generated): class ContainerPropertiesPaged(PageIterator): """An Iterable of Container properties. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A container name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. 
- :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.ContainerProperties) - - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only containers whose names + :param Callable command: Function to retrieve the next page of items. + :param Optional[str] prefix: Filters the results to return only containers whose names begin with the specified prefix. - :param int results_per_page: The maximum number of container names to retrieve per - call. - :param str continuation_token: An opaque continuation token. + :param Optional[int] results_per_page: The maximum number of container names to retrieve per call. + :param Optional[str] continuation_token: An opaque continuation token. """ - def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A container name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results.""" + current_page: List["ContainerProperties"] + """The current page of listed results.""" + + def __init__( + self, command: Callable, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None + ) -> None: super(ContainerPropertiesPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -414,177 +558,64 @@ def _build_item(item): return ContainerProperties._from_generated(item) # pylint: disable=protected-access -class BlobProperties(DictMixin): - """ - Blob Properties. - - :ivar str name: - The name of the blob. - :ivar str container: - The container in which the blob resides. - :ivar str snapshot: - Datetime value that uniquely identifies the blob snapshot. - :ivar ~azure.blob.storage.BlobType blob_type: - String indicating this blob's type. - :ivar dict metadata: - Name-value pairs associated with the blob as metadata. - :ivar ~datetime.datetime last_modified: - A datetime object representing the last time the blob was modified. - :ivar str etag: - The ETag contains a value that you can use to perform operations - conditionally. - :ivar int size: - The size of the content returned. If the entire blob was requested, - the length of blob in bytes. If a subset of the blob was requested, the - length of the returned subset. - :ivar str content_range: - Indicates the range of bytes returned in the event that the client - requested a subset of the blob. - :ivar int append_blob_committed_block_count: - (For Append Blobs) Number of committed blocks in the blob. - :ivar bool is_append_blob_sealed: - Indicate if the append blob is sealed or not. - - .. versionadded:: 12.4.0 - - :ivar int page_blob_sequence_number: - (For Page Blobs) Sequence number for page blob used for coordinating - concurrent writes. - :ivar bool server_encrypted: - Set to true if the blob is encrypted on the server. 
- :ivar ~azure.storage.blob.CopyProperties copy: - Stores all the copy properties for the blob. - :ivar ~azure.storage.blob.ContentSettings content_settings: - Stores all the content settings for the blob. - :ivar ~azure.storage.blob.LeaseProperties lease: - Stores all the lease information for the blob. - :ivar ~azure.storage.blob.StandardBlobTier blob_tier: - Indicates the access tier of the blob. The hot tier is optimized - for storing data that is accessed frequently. The cool storage tier - is optimized for storing data that is infrequently accessed and stored - for at least a month. The archive tier is optimized for storing - data that is rarely accessed and stored for at least six months - with flexible latency requirements. - :ivar str rehydrate_priority: - Indicates the priority with which to rehydrate an archived blob - :ivar ~datetime.datetime blob_tier_change_time: - Indicates when the access tier was last changed. - :ivar bool blob_tier_inferred: - Indicates whether the access tier was inferred by the service. - If false, it indicates that the tier was set explicitly. - :ivar bool deleted: - Whether this blob was deleted. - :ivar ~datetime.datetime deleted_time: - A datetime object representing the time at which the blob was deleted. - :ivar int remaining_retention_days: - The number of days that the blob will be retained before being permanently deleted by the service. - :ivar ~datetime.datetime creation_time: - Indicates when the blob was created, in UTC. - :ivar str archive_status: - Archive status of blob. - :ivar str encryption_key_sha256: - The SHA-256 hash of the provided encryption key. - :ivar str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - :ivar bool request_server_encrypted: - Whether this blob is encrypted. - :ivar list(~azure.storage.blob.ObjectReplicationPolicy) object_replication_source_properties: - Only present for blobs that have policy ids and rule ids applied to them. - - .. versionadded:: 12.4.0 - - :ivar str object_replication_destination_policy: - Represents the Object Replication Policy Id that created this blob. - - .. versionadded:: 12.4.0 +class ImmutabilityPolicy(DictMixin): + """Optional parameters for setting the immutability policy of a blob, blob snapshot or blob version. - :ivar ~datetime.datetime last_accessed_on: - Indicates when the last Read/Write operation was performed on a Blob. + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. - .. versionadded:: 12.6.0 - - :ivar int tag_count: - Tags count on this blob. + :keyword ~datetime.datetime expiry_time: + Specifies the date time when the blobs immutability policy is set to expire. + :keyword str or ~azure.storage.blob.BlobImmutabilityPolicyMode policy_mode: + Specifies the immutability policy mode to set on the blob. + Possible values to set include: "Locked", "Unlocked". + "Mutable" can only be returned by service, don't set to "Mutable". + """ - .. 
versionadded:: 12.4.0 + expiry_time: Optional["datetime"] = None + """Specifies the date time when the blobs immutability policy is set to expire.""" + policy_mode: Optional[str] = None + """Specifies the immutability policy mode to set on the blob.""" - :ivar dict(str, str) tags: - Key value pair of tags on this blob. + def __init__(self, **kwargs: Any) -> None: + self.expiry_time = kwargs.pop('expiry_time', None) + self.policy_mode = kwargs.pop('policy_mode', None) - .. versionadded:: 12.4.0 + @classmethod + def _from_generated(cls, generated): + immutability_policy = cls() + immutability_policy.expiry_time = generated.properties.immutability_policy_expires_on + immutability_policy.policy_mode = generated.properties.immutability_policy_mode + return immutability_policy - """ - def __init__(self, **kwargs): - self.name = kwargs.get('name') - self.container = None - self.snapshot = kwargs.get('x-ms-snapshot') - self.version_id = kwargs.get('x-ms-version-id') - self.is_current_version = kwargs.get('x-ms-is-current-version') - self.blob_type = BlobType(kwargs['x-ms-blob-type']) if kwargs.get('x-ms-blob-type') else None - self.metadata = kwargs.get('metadata') - self.encrypted_metadata = kwargs.get('encrypted_metadata') - self.last_modified = kwargs.get('Last-Modified') - self.etag = kwargs.get('ETag') - self.size = kwargs.get('Content-Length') - self.content_range = kwargs.get('Content-Range') - self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count') - self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed') - self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number') - self.server_encrypted = kwargs.get('x-ms-server-encrypted') - self.copy = CopyProperties(**kwargs) - self.content_settings = ContentSettings(**kwargs) - self.lease = LeaseProperties(**kwargs) - self.blob_tier = kwargs.get('x-ms-access-tier') - self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority') - self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time') - self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred') - self.deleted = False - self.deleted_time = None - self.remaining_retention_days = None - self.creation_time = kwargs.get('x-ms-creation-time') - self.archive_status = kwargs.get('x-ms-archive-status') - self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256') - self.encryption_scope = kwargs.get('x-ms-encryption-scope') - self.request_server_encrypted = kwargs.get('x-ms-server-encrypted') - self.object_replication_source_properties = kwargs.get('object_replication_source_properties') - self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id') - self.last_accessed_on = kwargs.get('x-ms-last-access-time') - self.tag_count = kwargs.get('x-ms-tag-count') - self.tags = None +class FilteredBlob(DictMixin): + """Blob info from a Filter Blobs API call.""" + name: str + """Blob name""" + container_name: Optional[str] + """Container name.""" + tags: Optional[Dict[str, str]] + """Key value pairs of blob tags.""" -class FilteredBlob(DictMixin): - """Blob info from a Filter Blobs API call. - - :ivar name: Blob name - :type name: str - :ivar container_name: Container name. - :type container_name: str - :ivar tags: Key value pairs of blob tags. 
- :type tags: Dict[str, str] - """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.name = kwargs.get('name', None) self.container_name = kwargs.get('container_name', None) self.tags = kwargs.get('tags', None) class LeaseProperties(DictMixin): - """Blob Lease Properties. - - :ivar str status: - The lease status of the blob. Possible values: locked|unlocked - :ivar str state: - Lease state of the blob. Possible values: available|leased|expired|breaking|broken - :ivar str duration: - When a blob is leased, specifies whether the lease is of infinite or fixed duration. - """ + """Blob Lease Properties.""" + + status: str + """The lease status of the blob. Possible values: locked|unlocked""" + state: str + """Lease state of the blob. Possible values: available|leased|expired|breaking|broken""" + duration: Optional[str] + """When a blob is leased, specifies whether the lease is of infinite or fixed duration.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.status = get_enum_value(kwargs.get('x-ms-lease-status')) self.state = get_enum_value(kwargs.get('x-ms-lease-state')) self.duration = get_enum_value(kwargs.get('x-ms-lease-duration')) @@ -601,33 +632,51 @@ def _from_generated(cls, generated): class ContentSettings(DictMixin): """The content settings of a blob. - :param str content_type: + :param Optional[str] content_type: The content type specified for the blob. If no content type was specified, the default content type is application/octet-stream. - :param str content_encoding: + :param Optional[str] content_encoding: If the content_encoding has previously been set for the blob, that value is stored. - :param str content_language: + :param Optional[str] content_language: If the content_language has previously been set for the blob, that value is stored. - :param str content_disposition: + :param Optional[str] content_disposition: content_disposition conveys additional information about how to process the response payload, and also can be used to attach additional metadata. If content_disposition has previously been set for the blob, that value is stored. - :param str cache_control: + :param Optional[str] cache_control: If the cache_control has previously been set for the blob, that value is stored. - :param str content_md5: + :param Optional[bytearray] content_md5: If the content_md5 has been set for the blob, this response header is stored so that the client can check for message content integrity. 
""" + content_type: Optional[str] = None + """The content type specified for the blob.""" + content_encoding: Optional[str] = None + """The content encoding specified for the blob.""" + content_language: Optional[str] = None + """The content language specified for the blob.""" + content_disposition: Optional[str] = None + """The content disposition specified for the blob.""" + cache_control: Optional[str] = None + """The cache control specified for the blob.""" + content_md5: Optional[bytearray] = None + """The content md5 specified for the blob.""" + def __init__( - self, content_type=None, content_encoding=None, - content_language=None, content_disposition=None, - cache_control=None, content_md5=None, **kwargs): + self, content_type: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_disposition: Optional[str] = None, + cache_control: Optional[str] = None, + content_md5: Optional[bytearray] = None, + **kwargs: Any + ) -> None: self.content_type = content_type or kwargs.get('Content-Type') self.content_encoding = content_encoding or kwargs.get('Content-Encoding') @@ -654,51 +703,47 @@ class CopyProperties(DictMixin): These properties will be `None` if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation, for example, using Set Blob Properties, Upload Blob, or Commit Block List. + """ - :ivar str id: - String identifier for the last attempted Copy Blob operation where this blob - was the destination blob. - :ivar str source: - URL up to 2 KB in length that specifies the source blob used in the last attempted - Copy Blob operation where this blob was the destination blob. - :ivar str status: - State of the copy operation identified by Copy ID, with these values: - success: - Copy completed successfully. - pending: - Copy is in progress. Check copy_status_description if intermittent, - non-fatal errors impede copy progress but don't cause failure. - aborted: - Copy was ended by Abort Copy Blob. - failed: - Copy failed. See copy_status_description for failure details. - :ivar str progress: - Contains the number of bytes copied and the total bytes in the source in the last + id: Optional[str] + """String identifier for the last attempted Copy Blob operation where this blob + was the destination blob.""" + source: Optional[str] + """URL up to 2 KB in length that specifies the source blob used in the last attempted + Copy Blob operation where this blob was the destination blob.""" + status: Optional[str] + """State of the copy operation identified by Copy ID, with these values: + success: Copy completed successfully. + pending: Copy is in progress. Check copy_status_description if intermittent, non-fatal errors impede copy progress + but don't cause failure. + aborted: Copy was ended by Abort Copy Blob. + failed: Copy failed. See copy_status_description for failure details.""" + progress: Optional[str] + """Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show - between 0 and Content-Length bytes copied. - :ivar ~datetime.datetime completion_time: - Conclusion time of the last attempted Copy Blob operation where this blob was the + between 0 and Content-Length bytes copied.""" + completion_time: Optional["datetime"] + """Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. 
This value can specify the time of a completed, aborted, or - failed copy attempt. - :ivar str status_description: - Only appears when x-ms-copy-status is failed or pending. Describes cause of fatal - or non-fatal copy operation failure. - :ivar bool incremental_copy: - Copies the snapshot of the source page blob to a destination page blob. + failed copy attempt.""" + status_description: Optional[str] + """Only appears when x-ms-copy-status is failed or pending. Describes cause of fatal + or non-fatal copy operation failure.""" + incremental_copy: Optional[bool] + """Copies the snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between - the previously copied snapshot are transferred to the destination - :ivar ~datetime.datetime destination_snapshot: - Included if the blob is incremental copy blob or incremental copy snapshot, + the previously copied snapshot are transferred to the destination.""" + destination_snapshot: Optional["datetime"] + """Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful - incremental copy snapshot for this blob. - """ + incremental copy snapshot for this blob.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.id = kwargs.get('x-ms-copy-id') self.source = kwargs.get('x-ms-copy-source') self.status = get_enum_value(kwargs.get('x-ms-copy-status')) self.progress = kwargs.get('x-ms-copy-progress') - self.completion_time = kwargs.get('x-ms-copy-completion_time') + self.completion_time = kwargs.get('x-ms-copy-completion-time') self.status_description = kwargs.get('x-ms-copy-status-description') self.incremental_copy = kwargs.get('x-ms-incremental-copy') self.destination_snapshot = kwargs.get('x-ms-copy-destination-snapshot') @@ -722,20 +767,33 @@ class BlobBlock(DictMixin): :param str block_id: Block id. - :param str state: - Block state. Possible values: committed|uncommitted - :ivar int size: - Block size in bytes. + :param BlockState state: + Block state. Possible values: BlockState.COMMITTED | BlockState.UNCOMMITTED """ - def __init__(self, block_id, state=BlockState.Latest): + block_id: str + """Block id.""" + state: BlockState + """Block state.""" + size: int + """Block size.""" + + def __init__(self, block_id: str, state: BlockState = BlockState.LATEST) -> None: self.id = block_id self.state = state - self.size = None + self.size = None # type: ignore [assignment] @classmethod def _from_generated(cls, generated): - block = cls(decode_base64_to_text(generated.name)) + try: + decoded_bytes = decode_base64_to_bytes(generated.name) + block_id = decoded_bytes.decode('utf-8') + # This is to fix a bug: when large blocks are uploaded through upload_blob, the block id isn't base64 + # encoded, while the service expects a base64-encoded block id. So if the block id we get back cannot be + # base64 decoded, it means we didn't base64 encode it when staging the block, and we use it directly. + except UnicodeDecodeError: + block_id = generated.name + block = cls(block_id) block.size = generated.size return block
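# NOTE (editorial): a sketch, not part of the patch, showing why the try/except
# above works: a base64 block id decodes cleanly, while a raw id from large-block
# uploads fails to decode. Uses only the standard library; ids are hypothetical.
import base64

for raw_id in (base64.b64encode(b"block-000001").decode(), "block-000001"):
    try:
        block_id = base64.b64decode(raw_id).decode("utf-8")
    except (ValueError, UnicodeDecodeError):  # b64decode itself may raise ValueError
        block_id = raw_id  # fall back to the id exactly as the service returned it
@@ -749,58 +807,53 @@ class PageRange(DictMixin): End of page range in bytes.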
""" - def __init__(self, start=None, end=None): + start: Optional[int] = None + """Start of page range in bytes.""" + end: Optional[int] = None + """End of page range in bytes.""" + cleared: bool + """Whether the range has been cleared.""" + + def __init__(self, start: Optional[int] = None, end: Optional[int] = None, *, cleared: bool = False) -> None: self.start = start self.end = end + self.cleared = cleared -class AccessPolicy(GenAccessPolicy): - """Access Policy class used by the set and get access policy methods in each service. +class PageRangePaged(PageIterator): + def __init__(self, command, results_per_page=None, continuation_token=None): + super(PageRangePaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] - A stored access policy can specify the start time, expiry time, and - permissions for the Shared Access Signatures with which it's associated. - Depending on how you want to control access to your resource, you can - specify all of these parameters within the stored access policy, and omit - them from the URL for the Shared Access Signature. Doing so permits you to - modify the associated signature's behavior at any time, as well as to revoke - it. Or you can specify one or more of the access policy parameters within - the stored access policy, and the others on the URL. Finally, you can - specify all of the parameters on the URL. In this case, you can use the - stored access policy to revoke the signature, but not to modify its behavior. + def _get_next_cb(self, continuation_token): + try: + return self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) - Together the Shared Access Signature and the stored access policy must - include all fields required to authenticate the signature. If any required - fields are missing, the request will fail. Likewise, if a field is specified - both in the Shared Access Signature URL and in the stored access policy, the - request will fail with status code 400 (Bad Request). + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.current_page = self._build_page(self._response) - :param permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. - :type permission: str or ~azure.storage.blob.ContainerSasPermissions - :param expiry: - The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. - :type expiry: ~datetime.datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. 
Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. - :type start: ~datetime.datetime or str - """ - def __init__(self, permission=None, expiry=None, start=None): - self.start = start - self.expiry = expiry - self.permission = permission + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_page(response): + if not response: + raise StopIteration + + return parse_page_list(response) class ContainerSasPermissions(object): @@ -828,26 +881,92 @@ class ContainerSasPermissions(object): List blobs in the container. :param bool tag: Set or get tags on the blobs in the container. + :keyword bool add: + Add a block to an append blob. + :keyword bool create: + Write a new blob, snapshot a blob, or copy a blob to a new blob. + :keyword bool permanent_delete: + Permits permanent delete on the blob. + :keyword bool filter_by_tags: + To enable finding blobs by tags. + :keyword bool move: + Move a blob or a directory and its contents to a new location. + :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. """ - def __init__(self, read=False, write=False, delete=False, list=False, delete_previous_version=False, tag=False): # pylint: disable=redefined-builtin + + read: bool = False + """The read permission for container SAS.""" + write: bool = False + """The write permission for container SAS.""" + delete: bool = False + """The delete permission for container SAS.""" + delete_previous_version: bool = False + """Permission to delete previous blob version for versioning enabled + storage accounts.""" + list: bool = False + """The list permission for container SAS.""" + tag: bool = False + """Set or get tags on the blobs in the container.""" + add: Optional[bool] + """Add a block to an append blob.""" + create: Optional[bool] + """Write a new blob, snapshot a blob, or copy a blob to a new blob.""" + permanent_delete: Optional[bool] + """Permits permanent delete on the blob.""" + move: Optional[bool] + """Move a blob or a directory and its contents to a new location.""" + execute: Optional[bool] + """Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob.""" + set_immutability_policy: Optional[bool] + """To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission.""" + + def __init__( + self, read: bool = False, + write: bool = False, + delete: bool = False, + list: bool = False, + delete_previous_version: bool = False, + tag: bool = False, + **kwargs: Any + ) -> None: self.read = read + self.add = kwargs.pop('add', False) + self.create = kwargs.pop('create', False) self.write = write self.delete = delete - self.list = list self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.list = list self.tag = tag + self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + ('w' if self.write else '') + ('d' if self.delete
else '') + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + ('l' if self.list else '') + - ('t' if self.tag else '')) + ('t' if self.tag else '') + + ('f' if self.filter_by_tags else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) def __str__(self): return self._str @classmethod - def from_string(cls, permission): + def from_string(cls, permission: str) -> "ContainerSasPermissions": """Create a ContainerSasPermissions from a string. To specify read, write, delete, or list permissions you need only to @@ -860,17 +979,88 @@ def from_string(cls, permission): :rtype: ~azure.storage.blob.ContainerSasPermissions """ p_read = 'r' in permission + p_add = 'a' in permission + p_create = 'c' in permission p_write = 'w' in permission p_delete = 'd' in permission - p_list = 'l' in permission p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_list = 'l' in permission p_tag = 't' in permission + p_filter_by_tags = 'f' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, list=p_list, - delete_previous_version=p_delete_previous_version, tag=p_tag) + delete_previous_version=p_delete_previous_version, tag=p_tag, add=p_add, + create=p_create, permanent_delete=p_permanent_delete, filter_by_tags=p_filter_by_tags, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) return parsed
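# NOTE (editorial): a sketch, not part of the patch. It round-trips the expanded
# permission set; the flag order below mirrors the _str concatenation in __init__.
perms = ContainerSasPermissions.from_string("racwdxyltfmei")
assert perms.add and perms.permanent_delete and perms.set_immutability_policy
assert str(perms) == "racwdxyltfmei"
+class AccessPolicy(GenAccessPolicy): + """Access Policy class used by the set and get access policy methods in each service. + + A stored access policy can specify the start time, expiry time, and + permissions for the Shared Access Signatures with which it's associated. + Depending on how you want to control access to your resource, you can + specify all of these parameters within the stored access policy, and omit + them from the URL for the Shared Access Signature. Doing so permits you to + modify the associated signature's behavior at any time, as well as to revoke + it. Or you can specify one or more of the access policy parameters within + the stored access policy, and the others on the URL. Finally, you can + specify all of the parameters on the URL. In this case, you can use the + stored access policy to revoke the signature, but not to modify its behavior. + + Together the Shared Access Signature and the stored access policy must + include all fields required to authenticate the signature. If any required + fields are missing, the request will fail. Likewise, if a field is specified + both in the Shared Access Signature URL and in the stored access policy, the + request will fail with status code 400 (Bad Request). + + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: Optional[Union[ContainerSasPermissions, str]] + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy.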
Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: Optional[Union[str, datetime]] + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: Optional[Union[str, datetime]] + """ + + permission: Optional[Union[ContainerSasPermissions, str]] # type: ignore [assignment] + """The permissions associated with the shared access signature. The user is restricted to + operations allowed by the permissions.""" + expiry: Optional[Union["datetime", str]] # type: ignore [assignment] + """The time at which the shared access signature becomes invalid.""" + start: Optional[Union["datetime", str]] # type: ignore [assignment] + """The time at which the shared access signature becomes valid.""" + + def __init__( + self, permission: Optional[Union["ContainerSasPermissions", str]] = None, + expiry: Optional[Union[str, "datetime"]] = None, + start: Optional[Union[str, "datetime"]] = None + ) -> None: + self.start = start + self.expiry = expiry + self.permission = permission
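# NOTE (editorial): a sketch, not part of the patch. A stored access policy built
# from the pieces defined above; the one-hour expiry is hypothetical.
from datetime import datetime, timedelta, timezone

stored_policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True, list=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1))
+ + class BlobSasPermissions(object): """BlobSasPermissions class to be used with the :func:`~azure.storage.blob.generate_blob_sas` function. @@ -892,29 +1082,81 @@ class BlobSasPermissions(object): Delete the previous blob version for the versioning enabled storage account. :param bool tag: Set or get tags on the blob. + :keyword bool permanent_delete: + Permits permanent delete on the blob. + :keyword bool move: + Move a blob or a directory and its contents to a new location. + :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission.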
""" - def __init__(self, read=False, add=False, create=False, write=False, - delete=False, delete_previous_version=False, tag=True): + + read: bool = False + """The read permission for Blob SAS.""" + add: Optional[bool] + """The add permission for Blob SAS.""" + create: Optional[bool] + """Write a new blob, snapshot a blob, or copy a blob to a new blob.""" + write: bool = False + """The write permission for Blob SAS.""" + delete: bool = False + """The delete permission for Blob SAS.""" + delete_previous_version: bool = False + """Permission to delete previous blob version for versioning enabled + storage accounts.""" + tag: bool = False + """Set or get tags on the blobs in the Blob.""" + permanent_delete: Optional[bool] + """To enable permanent delete on the blob is permitted.""" + move: Optional[bool] + """Move a blob or a directory and its contents to a new location.""" + execute: Optional[bool] + """Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob.""" + set_immutability_policy: Optional[bool] + """To get immutability policy, you just need read permission.""" + + def __init__( + self, read: bool = False, + add: bool = False, + create: bool = False, + write: bool = False, + delete: bool = False, + delete_previous_version: bool = False, + tag: bool = False, + **kwargs: Any + ) -> None: self.read = read self.add = add self.create = create self.write = write self.delete = delete self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) self.tag = tag + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('a' if self.add else '') + ('c' if self.create else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('t' if self.tag else '')) + ('y' if self.permanent_delete else '') + + ('t' if self.tag else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) def __str__(self): return self._str @classmethod - def from_string(cls, permission): + def from_string(cls, permission: str) -> "BlobSasPermissions": """Create a BlobSasPermissions from a string. To specify read, add, create, write, or delete permissions you need only to @@ -932,10 +1174,15 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission p_tag = 't' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, add=p_add, create=p_create, write=p_write, delete=p_delete, - delete_previous_version=p_delete_previous_version, tag=p_tag) + delete_previous_version=p_delete_previous_version, tag=p_tag, permanent_delete=p_permanent_delete, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) return parsed @@ -959,10 +1206,16 @@ class CustomerProvidedEncryptionKey(object): Base64-encoded AES-256 encryption key value. :param str key_hash: Base64-encoded SHA256 of the encryption key. - :ivar str algorithm: - Specifies the algorithm to use when encrypting data using the given key. Must be AES256. 
""" - def __init__(self, key_value, key_hash): + + key_value: str + """Base64-encoded AES-256 encryption key value.""" + key_hash: str + """Base64-encoded SHA256 of the encryption key.""" + algorithm: str + """Specifies the algorithm to use when encrypting data using the given key. Must be AES256.""" + + def __init__(self, key_value: str, key_hash: str) -> None: self.key_value = key_value self.key_hash = key_hash self.algorithm = 'AES256' @@ -984,7 +1237,14 @@ class ContainerEncryptionScope(object): set on the container. Default value is false. """ - def __init__(self, default_encryption_scope, **kwargs): + default_encryption_scope: str + """Specifies the default encryption scope to set on the container and use for + all future writes.""" + prevent_encryption_scope_override: bool + """If true, prevents any request from specifying a different encryption scope than the scope + set on the container.""" + + def __init__(self, default_encryption_scope: str, **kwargs: Any) -> None: self.default_encryption_scope = default_encryption_scope self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', False) @@ -999,17 +1259,17 @@ def _from_generated(cls, generated): return None -class DelimitedJsonDialect(object): +class DelimitedJsonDialect(DictMixin): """Defines the input or output JSON serialization for a blob data query. - :keyword str delimiter: The line separator character, default value is '\n' + :keyword str delimiter: The line separator character, default value is '\\\\n'. """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.delimiter = kwargs.pop('delimiter', '\n') -class DelimitedTextDialect(object): +class DelimitedTextDialect(DictMixin): """Defines the input or output delimited (CSV) serialization for a blob query request. :keyword str delimiter: @@ -1017,7 +1277,7 @@ class DelimitedTextDialect(object): :keyword str quotechar: Field quote, defaults to '"'. :keyword str lineterminator: - Record separator, defaults to '\n'. + Record separator, defaults to '\\\\n'. :keyword str escapechar: Escape char, defaults to empty. :keyword bool has_header: @@ -1025,7 +1285,8 @@ class DelimitedTextDialect(object): data will be returned inclusive of the first line. If set to True, the data will be returned exclusive of the first line. """ - def __init__(self, **kwargs): + + def __init__(self, **kwargs: Any) -> None: self.delimiter = kwargs.pop('delimiter', ',') self.quotechar = kwargs.pop('quotechar', '"') self.lineterminator = kwargs.pop('lineterminator', '\n') @@ -1043,11 +1304,12 @@ class ArrowDialect(ArrowField): :keyword int precision: The precision of the field. :keyword int scale: The scale of the field. """ - def __init__(self, type, **kwargs): # pylint: disable=redefined-builtin + + def __init__(self, type, **kwargs: Any) -> None: # pylint: disable=redefined-builtin super(ArrowDialect, self).__init__(type=type, **kwargs) -class ArrowType(str, Enum): +class ArrowType(str, Enum, metaclass=CaseInsensitiveEnumMeta): INT64 = "int64" BOOL = "bool" @@ -1057,50 +1319,188 @@ class ArrowType(str, Enum): DECIMAL = 'decimal' -class ObjectReplicationPolicy(DictMixin): - """Policy id and rule ids applied to a blob. +class ObjectReplicationRule(DictMixin): + """Policy id and rule ids applied to a blob.""" - :ivar str policy_id: - Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair. 
- :ivar list(~azure.storage.blob.ObjectReplicationRule) rules: - Within each policy there may be multiple replication rules. - e.g. rule 1= src/container/.pdf to dst/container2/; rule2 = src/container1/.jpg to dst/container3 - """ + rule_id: str + """Rule id.""" + status: str + """The status of the rule. It could be "Complete" or "Failed" """ - def __init__(self, **kwargs): - self.policy_id = kwargs.pop('policy_id', None) - self.rules = kwargs.pop('rules', None) + def __init__(self, **kwargs: Any) -> None: + self.rule_id = kwargs.pop('rule_id', None) # type: ignore [assignment] + self.status = kwargs.pop('status', None) # type: ignore [assignment] -class ObjectReplicationRule(DictMixin): - """Policy id and rule ids applied to a blob. +class ObjectReplicationPolicy(DictMixin): + """Policy id and rule ids applied to a blob.""" - :ivar str rule_id: - Rule id. - :ivar str status: - The status of the rule. It could be "Complete" or "Failed" - """ + policy_id: str + """Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair.""" + rules: List[ObjectReplicationRule] + """Within each policy there may be multiple replication rules. + e.g. rule 1= src/container/.pdf to dst/container2/; rule2 = src/container1/.jpg to dst/container3""" - def __init__(self, **kwargs): - self.rule_id = kwargs.pop('rule_id', None) - self.status = kwargs.pop('status', None) + def __init__(self, **kwargs: Any) -> None: + self.policy_id = kwargs.pop('policy_id', None) # type: ignore [assignment] + self.rules = kwargs.pop('rules', []) + + +class BlobProperties(DictMixin): + """Blob Properties.""" + + name: str + """The name of the blob.""" + container: str + """The container in which the blob resides.""" + snapshot: Optional[str] + """Datetime value that uniquely identifies the blob snapshot.""" + blob_type: "BlobType" + """String indicating this blob's type.""" + metadata: Dict[str, str] + """Name-value pairs associated with the blob as metadata.""" + last_modified: "datetime" + """A datetime object representing the last time the blob was modified.""" + etag: str + """The ETag contains a value that you can use to perform operations + conditionally.""" + size: int + """The size of the content returned. If the entire blob was requested, + the length of blob in bytes. If a subset of the blob was requested, the + length of the returned subset.""" + content_range: Optional[str] + """Indicates the range of bytes returned in the event that the client + requested a subset of the blob.""" + append_blob_committed_block_count: Optional[int] + """(For Append Blobs) Number of committed blocks in the blob.""" + is_append_blob_sealed: Optional[bool] + """Indicate if the append blob is sealed or not.""" + page_blob_sequence_number: Optional[int] + """(For Page Blobs) Sequence number for page blob used for coordinating + concurrent writes.""" + server_encrypted: bool + """Set to true if the blob is encrypted on the server.""" + copy: "CopyProperties" + """Stores all the copy properties for the blob.""" + content_settings: ContentSettings + """Stores all the content settings for the blob.""" + lease: LeaseProperties + """Stores all the lease information for the blob.""" + blob_tier: Optional[StandardBlobTier] + """Indicates the access tier of the blob. The hot tier is optimized + for storing data that is accessed frequently. The cool storage tier + is optimized for storing data that is infrequently accessed and stored + for at least a month. 
The archive tier is optimized for storing + data that is rarely accessed and stored for at least six months + with flexible latency requirements.""" + rehydrate_priority: Optional[str] + """Indicates the priority with which to rehydrate an archived blob.""" + blob_tier_change_time: Optional["datetime"] + """Indicates when the access tier was last changed.""" + blob_tier_inferred: Optional[bool] + """Indicates whether the access tier was inferred by the service. + If false, it indicates that the tier was set explicitly.""" + deleted: Optional[bool] + """Whether this blob was deleted.""" + deleted_time: Optional["datetime"] + """A datetime object representing the time at which the blob was deleted.""" + remaining_retention_days: Optional[int] + """The number of days that the blob will be retained before being permanently deleted by the service.""" + creation_time: "datetime" + """Indicates when the blob was created, in UTC.""" + archive_status: Optional[str] + """Archive status of blob.""" + encryption_key_sha256: Optional[str] + """The SHA-256 hash of the provided encryption key.""" + encryption_scope: Optional[str] + """A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised.""" + request_server_encrypted: Optional[bool] + """Whether this blob is encrypted.""" + object_replication_source_properties: Optional[List[ObjectReplicationPolicy]] + """Only present for blobs that have policy ids and rule ids applied to them.""" + object_replication_destination_policy: Optional[str] + """Represents the Object Replication Policy Id that created this blob.""" + last_accessed_on: Optional["datetime"] + """Indicates when the last Read/Write operation was performed on a Blob.""" + tag_count: Optional[int] + """Tags count on this blob.""" + tags: Optional[Dict[str, str]] + """Key value pair of tags on this blob.""" + has_versions_only: Optional[bool] + """A true value indicates the root blob is deleted.""" + immutability_policy: ImmutabilityPolicy + """Specifies the immutability policy of a blob, blob snapshot or blob version.""" + has_legal_hold: Optional[bool] + """Specifies if a legal hold should be set on the blob.
+ Currently this parameter of upload_blob() API is for BlockBlob only.""" + + def __init__(self, **kwargs: Any) -> None: + self.name = kwargs.get('name') # type: ignore [assignment] + self.container = None # type: ignore [assignment] + self.snapshot = kwargs.get('x-ms-snapshot') + self.version_id = kwargs.get('x-ms-version-id') + self.is_current_version = kwargs.get('x-ms-is-current-version') + self.blob_type = BlobType(kwargs['x-ms-blob-type']) if ( + kwargs.get('x-ms-blob-type')) else None # type: ignore [assignment] + self.metadata = kwargs.get('metadata') # type: ignore [assignment] + self.encrypted_metadata = kwargs.get('encrypted_metadata') + self.last_modified = kwargs.get('Last-Modified') # type: ignore [assignment] + self.etag = kwargs.get('ETag') # type: ignore [assignment] + self.size = kwargs.get('Content-Length') # type: ignore [assignment] + self.content_range = kwargs.get('Content-Range') + self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count') + self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed') + self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number') + self.server_encrypted = kwargs.get('x-ms-server-encrypted') # type: ignore [assignment] + self.copy = CopyProperties(**kwargs) + self.content_settings = ContentSettings(**kwargs) + self.lease = LeaseProperties(**kwargs) + self.blob_tier = kwargs.get('x-ms-access-tier') + self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority') + self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time') + self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred') + self.deleted = False + self.deleted_time = None + self.remaining_retention_days = None + self.creation_time = kwargs.get('x-ms-creation-time') # type: ignore [assignment] + self.archive_status = kwargs.get('x-ms-archive-status') + self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256') + self.encryption_scope = kwargs.get('x-ms-encryption-scope') + self.request_server_encrypted = kwargs.get('x-ms-server-encrypted') + self.object_replication_source_properties = kwargs.get('object_replication_source_properties') + self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id') + self.last_accessed_on = kwargs.get('x-ms-last-access-time') + self.tag_count = kwargs.get('x-ms-tag-count') + self.tags = None + self.immutability_policy = ImmutabilityPolicy(expiry_time=kwargs.get('x-ms-immutability-policy-until-date'), + policy_mode=kwargs.get('x-ms-immutability-policy-mode')) + self.has_legal_hold = kwargs.get('x-ms-legal-hold') + self.has_versions_only = None class BlobQueryError(object): - """The error happened during quick query operation. + """An error that occurred during a quick query operation.""" - :ivar str error: - The name of the error. - :ivar bool is_fatal: - If true, this error prevents further query processing. More result data may be returned, + error: Optional[str] + """The name of the error.""" + is_fatal: bool + """If true, this error prevents further query processing. More result data may be returned, but there is no guarantee that all of the original data will be processed. - If false, this error does not prevent further query processing. - :ivar str description: - A description of the error. - :ivar int position: - The blob offset at which the error occurred.
- """ - def __init__(self, error=None, is_fatal=False, description=None, position=None): + If false, this error does not prevent further query processing.""" + description: Optional[str] + """A description of the error.""" + position: Optional[int] + """The blob offset at which the error occurred.""" + + def __init__( + self, error: Optional[str] = None, + is_fatal: bool = False, + description: Optional[str] = None, + position: Optional[int] = None + ) -> None: self.error = error self.is_fatal = is_fatal self.description = description diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_quick_query_helper.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_quick_query_helper.py index eb51d987b9ac..95f8a4427bba 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_quick_query_helper.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_quick_query_helper.py @@ -5,37 +5,39 @@ # -------------------------------------------------------------------------- from io import BytesIO -from typing import Union, Iterable, IO # pylint: disable=unused-import +from typing import Any, Dict, Generator, IO, Iterable, Optional, Type, Union, TYPE_CHECKING -from ._shared.avro.datafile import DataFileReader from ._shared.avro.avro_io import DatumReader +from ._shared.avro.datafile import DataFileReader + +if TYPE_CHECKING: + from ._models import BlobQueryError class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes - """A streaming object to read query results. - - :ivar str name: - The name of the blob being quered. - :ivar str container: - The name of the container where the blob is. - :ivar dict response_headers: - The response_headers of the quick query request. - :ivar bytes record_delimiter: - The delimiter used to separate lines, or records with the data. The `records` - method will return these lines via a generator. - """ + """A streaming object to read query results.""" + + name: str + """The name of the blob being quered.""" + container: str + """The name of the container where the blob is.""" + response_headers: Dict[str, Any] + """The response_headers of the quick query request.""" + record_delimiter: str + """The delimiter used to separate lines, or records with the data. 
The `records` + method will return these lines via a generator.""" def __init__( self, - name=None, - container=None, - errors=None, - record_delimiter='\n', - encoding=None, - headers=None, - response=None, - error_cls=None, - ): + name: str = None, # type: ignore [assignment] + container: str = None, # type: ignore [assignment] + errors: Any = None, + record_delimiter: str = '\n', + encoding: Optional[str] = None, + headers: Dict[str, Any] = None, # type: ignore [assignment] + response: Any = None, + error_cls: Type["BlobQueryError"] = None, # type: ignore [assignment] + ) -> None: self.name = name self.container = container self.response_headers = headers @@ -51,7 +53,7 @@ def __init__( def __len__(self): return self._size - def _process_record(self, result): + def _process_record(self, result: Dict[str, Any]) -> Optional[bytes]: self._size = result.get('totalBytes', self._size) self._bytes_processed = result.get('bytesScanned', self._bytes_processed) if 'data' in result: @@ -67,7 +69,7 @@ def _process_record(self, result): self._errors(error) return None - def _iter_stream(self): + def _iter_stream(self) -> Generator[bytes, None, None]: if self._first_result is not None: yield self._first_result for next_result in self._parsed_results: @@ -75,14 +77,14 @@ def _iter_stream(self): if processed_result is not None: yield processed_result - def readall(self): - # type: () -> Union[bytes, str] + def readall(self) -> Union[bytes, str]: """Return all query results. This operation is blocking until all data is downloaded. If encoding has been configured - this will be used to decode individual records are they are received. + :returns: The query results. :rtype: Union[bytes, str] """ stream = BytesIO() @@ -92,11 +94,10 @@ def readall(self): return data.decode(self._encoding) return data - def readinto(self, stream): - # type: (IO) -> None + def readinto(self, stream: IO) -> None: """Download the query result to a stream. - :param stream: + :param IO stream: The stream to download to. This can be an open file-handle, or any writable stream. :returns: None @@ -104,14 +105,14 @@ def readinto(self, stream): for record in self._iter_stream(): stream.write(record) - def records(self): - # type: () -> Iterable[Union[bytes, str]] + def records(self) -> Iterable[Union[bytes, str]]: """Returns a record generator for the query result. Records will be returned line by line. If encoding has been configured - this will be used to decode individual records are they are received. + :returns: A record generator for the query result. :rtype: Iterable[Union[bytes, str]] """ delimiter = self.record_delimiter.encode('utf-8') @@ -123,7 +124,6 @@ def records(self): yield record - class QuickQueryStreamer(object): """ File-like streaming iterator. @@ -153,8 +153,6 @@ def __next__(self): self._download_offset += len(next_part) return next_part - next = __next__ # Python 2 compatibility. - def tell(self): return self._point @@ -165,7 +163,7 @@ def seek(self, offset, whence=0): self._point += offset else: raise ValueError("whence must be 0, or 1") - if self._point < 0: + if self._point < 0: # pylint: disable=consider-using-max-builtin self._point = 0 # XXX is this right? 
def read(self, size): diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_serialize.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_serialize.py index fbfed9c9974c..e9d5eb190959 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_serialize.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_serialize.py @@ -3,7 +3,8 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use +from typing import Any, cast, Dict, Optional, Tuple, Union, TYPE_CHECKING + try: from urllib.parse import quote except ImportError: @@ -11,23 +12,25 @@ from azure.core import MatchConditions -from ._models import ( - ContainerEncryptionScope, - DelimitedJsonDialect) from ._generated.models import ( - ModifiedAccessConditions, - SourceModifiedAccessConditions, - CpkScopeInfo, + ArrowConfiguration, + BlobTag, + BlobTags, ContainerCpkScopeInfo, - QueryFormat, - QuerySerialization, + CpkScopeInfo, DelimitedTextConfiguration, JsonTextConfiguration, - ArrowConfiguration, + LeaseAccessConditions, + ModifiedAccessConditions, + QueryFormat, QueryFormatType, - BlobTag, - BlobTags, LeaseAccessConditions + QuerySerialization, + SourceModifiedAccessConditions ) +from ._models import ContainerEncryptionScope, DelimitedJsonDialect + +if TYPE_CHECKING: + from ._lease import BlobLeaseClient _SUPPORTED_API_VERSIONS = [ @@ -36,37 +39,57 @@ '2019-10-10', '2019-12-12', '2020-02-10', - '2020-04-08' + '2020-04-08', + '2020-06-12', + '2020-08-04', + '2020-10-02', + '2020-12-06', + '2021-02-12', + '2021-04-10', + '2021-06-08', + '2021-08-06', + '2021-12-02', + '2022-11-02', + '2023-01-03', + '2023-05-03', + '2023-08-03', + '2023-11-03', + '2024-05-04', + '2024-08-04', + '2024-11-04', + '2025-01-05', ] -def _get_match_headers(kwargs, match_param, etag_param): - # type: (str) -> Tuple(Dict[str, Any], Optional[str], Optional[str]) +def _get_match_headers( + kwargs: Dict[str, Any], + match_param: str, + etag_param: str +) -> Tuple[Optional[str], Optional[Any]]: if_match = None if_none_match = None match_condition = kwargs.pop(match_param, None) if match_condition == MatchConditions.IfNotModified: if_match = kwargs.pop(etag_param, None) if not if_match: - raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + raise ValueError(f"'{match_param}' specified without '{etag_param}'.") elif match_condition == MatchConditions.IfPresent: if_match = '*' elif match_condition == MatchConditions.IfModified: if_none_match = kwargs.pop(etag_param, None) if not if_none_match: - raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + raise ValueError(f"'{match_param}' specified without '{etag_param}'.") elif match_condition == MatchConditions.IfMissing: if_none_match = '*' elif match_condition is None: if kwargs.get(etag_param): - raise ValueError("'{}' specified without '{}'.".format(etag_param, match_param)) + raise ValueError(f"'{etag_param}' specified without '{match_param}'.") else: - raise TypeError("Invalid match condition: {}".format(match_condition)) + raise TypeError(f"Invalid match condition: {match_condition}") return if_match, 
if_none_match
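# NOTE (editorial): a sketch, not part of the patch, of how MatchConditions maps to
# If-Match via _get_match_headers above; the etag value is hypothetical.
from azure.core import MatchConditions

opts = {'match_condition': MatchConditions.IfNotModified, 'etag': '"0x8D0"'}
if_match, if_none_match = _get_match_headers(opts, 'match_condition', 'etag')
assert if_match == '"0x8D0"' and if_none_match is None
-def get_access_conditions(lease): - # type: (Optional[Union[BlobLeaseClient, str]]) -> Union[LeaseAccessConditions, None] +def get_access_conditions(lease: Optional[Union["BlobLeaseClient", str]]) -> Optional[LeaseAccessConditions]: try: lease_id = lease.id # type: ignore except AttributeError: @@ -74,8 +97,7 @@ def get_access_conditions(lease): return LeaseAccessConditions(lease_id=lease_id) if lease_id else None -def get_modify_conditions(kwargs): - # type: (Dict[str, Any]) -> ModifiedAccessConditions +def get_modify_conditions(kwargs: Dict[str, Any]) -> ModifiedAccessConditions: if_match, if_none_match = _get_match_headers(kwargs, 'match_condition', 'etag') return ModifiedAccessConditions( if_modified_since=kwargs.pop('if_modified_since', None), @@ -86,8 +108,7 @@ def get_modify_conditions(kwargs): ) -def get_source_conditions(kwargs): - # type: (Dict[str, Any]) -> SourceModifiedAccessConditions +def get_source_conditions(kwargs: Dict[str, Any]) -> SourceModifiedAccessConditions: if_match, if_none_match = _get_match_headers(kwargs, 'source_match_condition', 'source_etag') return SourceModifiedAccessConditions( source_if_modified_since=kwargs.pop('source_if_modified_since', None), @@ -98,15 +119,13 @@ def get_source_conditions(kwargs): ) -def get_cpk_scope_info(kwargs): - # type: (Dict[str, Any]) -> CpkScopeInfo +def get_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[CpkScopeInfo]: if 'encryption_scope' in kwargs: return CpkScopeInfo(encryption_scope=kwargs.pop('encryption_scope')) return None -def get_container_cpk_scope_info(kwargs): - # type: (Dict[str, Any]) -> ContainerCpkScopeInfo +def get_container_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[ContainerCpkScopeInfo]: encryption_scope = kwargs.pop('container_encryption_scope', None) if encryption_scope: if isinstance(encryption_scope, ContainerEncryptionScope): @@ -123,17 +142,19 @@ def get_container_cpk_scope_info(kwargs): return None -def get_api_version(kwargs, default): - # type: (Dict[str, Any]) -> str - api_version = kwargs.pop('api_version', None) +def get_api_version(kwargs: Dict[str, Any]) -> str: + api_version = kwargs.get('api_version', None) if api_version and api_version not in _SUPPORTED_API_VERSIONS: versions = '\n'.join(_SUPPORTED_API_VERSIONS) - raise ValueError("Unsupported API version '{}'. Please select from:\n{}".format(api_version, versions)) - return api_version or default + raise ValueError(f"Unsupported API version '{api_version}'.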
Please select from:\n{versions}") + return api_version or _SUPPORTED_API_VERSIONS[-1] +def get_version_id(self_vid: Optional[str], kwargs: Dict[str, Any]) -> Optional[str]: + if 'version_id' in kwargs: + return cast(str, kwargs.pop('version_id')) + return self_vid -def serialize_blob_tags_header(tags=None): - # type: (Optional[Dict[str, str]]) -> str +def serialize_blob_tags_header(tags: Optional[Dict[str, str]] = None) -> Optional[str]: if tags is None: return None @@ -151,28 +172,27 @@ def serialize_blob_tags_header(tags=None): return ''.join(components) -def serialize_blob_tags(tags=None): - # type: (Optional[Dict[str, str]]) -> Union[BlobTags, None] +def serialize_blob_tags(tags: Optional[Dict[str, str]] = None) -> BlobTags: tag_list = [] if tags: tag_list = [BlobTag(key=k, value=v) for k, v in tags.items()] return BlobTags(blob_tag_set=tag_list) -def serialize_query_format(formater): - if isinstance(formater, DelimitedJsonDialect): - serialization_settings = JsonTextConfiguration( - record_separator=formater.delimiter - ) - qq_format = QueryFormat( - type=QueryFormatType.json, - json_text_configuration=serialization_settings) +def serialize_query_format(formater: Union[str, DelimitedJsonDialect]) -> Optional[QuerySerialization]: + if formater == "ParquetDialect": + qq_format = QueryFormat(type=QueryFormatType.PARQUET, parquet_text_configuration=' ') #type: ignore [arg-type] + elif isinstance(formater, DelimitedJsonDialect): + json_serialization_settings = JsonTextConfiguration(record_separator=formater.delimiter) + qq_format = QueryFormat(type=QueryFormatType.JSON, json_text_configuration=json_serialization_settings) elif hasattr(formater, 'quotechar'): # This supports a csv.Dialect as well try: - headers = formater.has_header + headers = formater.has_header # type: ignore except AttributeError: headers = False - serialization_settings = DelimitedTextConfiguration( + if isinstance(formater, str): + raise ValueError("Unknown string value provided. 
Accepted values: ParquetDialect") + csv_serialization_settings = DelimitedTextConfiguration( column_separator=formater.delimiter, field_quote=formater.quotechar, record_separator=formater.lineterminator, @@ -180,18 +200,14 @@ def serialize_query_format(formater): headers_present=headers ) qq_format = QueryFormat( - type=QueryFormatType.delimited, - delimited_text_configuration=serialization_settings + type=QueryFormatType.DELIMITED, + delimited_text_configuration=csv_serialization_settings ) elif isinstance(formater, list): - serialization_settings = ArrowConfiguration( - schema=formater - ) - qq_format = QueryFormat( - type=QueryFormatType.arrow, - arrow_configuration=serialization_settings) + arrow_serialization_settings = ArrowConfiguration(schema=formater) + qq_format = QueryFormat(type=QueryFormatType.arrow, arrow_configuration=arrow_serialization_settings) elif not formater: return None else: - raise TypeError("Format must be DelimitedTextDialect or DelimitedJsonDialect.") + raise TypeError("Format must be DelimitedTextDialect or DelimitedJsonDialect or ParquetDialect.") return QuerySerialization(format=qq_format) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/__init__.py index 160f88223820..a8b1a27d48f9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/__init__.py @@ -13,8 +13,6 @@ except ImportError: from urllib2 import quote, unquote # type: ignore -import six - def url_quote(url): return quote(url) @@ -25,14 +23,14 @@ def url_unquote(url): def encode_base64(data): - if isinstance(data, six.text_type): + if isinstance(data, str): data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') def decode_base64_to_bytes(data): - if isinstance(data, six.text_type): + if isinstance(data, str): data = data.encode('utf-8') return base64.b64decode(data) @@ -46,9 +44,9 @@ def sign_string(key, string_to_sign, key_is_base64=True): if key_is_base64: key = decode_base64_to_bytes(key) else: - if isinstance(key, six.text_type): + if isinstance(key, str): key = key.encode('utf-8') - if isinstance(string_to_sign, six.text_type): + if isinstance(string_to_sign, str): string_to_sign = string_to_sign.encode('utf-8') signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256) digest = signed_hmac_sha256.digest() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/authentication.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/authentication.py index d04c1e4fb539..e4d5ed730846 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/authentication.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/authentication.py @@ -5,13 +5,10 @@ # -------------------------------------------------------------------------- import logging -import sys - -try: - from 
urllib.parse import urlparse, unquote -except ImportError: - from urlparse import urlparse # type: ignore - from urllib2 import unquote # type: ignore +import re +from typing import List, Tuple +from urllib.parse import unquote, urlparse +from functools import cmp_to_key try: from yarl import URL @@ -19,7 +16,7 @@ pass try: - from azure.core.pipeline.transport import AioHttpTransport + from azure.core.pipeline.transport import AioHttpTransport # pylint: disable=non-abstract-transport-import except ImportError: AioHttpTransport = None @@ -28,24 +25,96 @@ from . import sign_string - logger = logging.getLogger(__name__) +table_lv0 = [ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721, 0x723, 0x725, + 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e, + 0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, + 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, + 0x0, 0x0, 0x0, 0x743, 0x744, 0x748, 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, + 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, + 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c, 0x0, 0x750, 0x0, +] + +table_lv4 = [ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +] + +def compare(lhs: str, rhs: str) -> int: # pylint:disable=too-many-return-statements + tables = [table_lv0, table_lv4] + curr_level, i, j, n = 0, 0, 0, len(tables) + lhs_len = len(lhs) + rhs_len = len(rhs) + while curr_level < n: + if curr_level == (n - 1) and i != j: + if i > j: + return -1 + if i < j: + return 1 + return 0 + + w1 = tables[curr_level][ord(lhs[i])] if i < lhs_len else 0x1 + w2 = tables[curr_level][ord(rhs[j])] if j < rhs_len else 0x1 + + if w1 == 0x1 and w2 == 0x1: + i = 0 + j = 0 + curr_level += 1 + elif w1 == w2: + i += 1 + j += 1 + elif w1 == 0: + i += 1 + elif w2 == 0: + j += 1 + else: + if w1 < w2: + return -1 + if w1 > w2: + return 1 + return 0 + return 0 + # wraps a given exception with the desired exception type def _wrap_exception(ex, desired_type): msg = "" if ex.args: msg = ex.args[0] - if sys.version_info >= (3,): - # Automatic chaining in Python 3 means we keep the trace - return desired_type(msg) - # There isn't a good solution in 2 for keeping the stack trace - # in general, or that will not result in an error in 3 - # However, we can keep the previous error type and message - # TODO: In the future we will log the trace - return desired_type('{}: {}'.format(ex.__class__.__name__, msg)) + return desired_type(msg) + +# This method attempts to emulate the sorting done by the service +def _storage_header_sort(input_headers: List[Tuple[str, str]]) -> List[Tuple[str, 
str]]: + + # Build dict of tuples and list of keys + header_dict = {} + header_keys = [] + for k, v in input_headers: + header_dict[k] = v + header_keys.append(k) + + try: + header_keys = sorted(header_keys, key=cmp_to_key(compare)) + except ValueError as exc: + raise ValueError("Illegal character encountered when sorting headers.") from exc + + # Build list of sorted tuples + sorted_headers = [] + for key in header_keys: + sorted_headers.append((key, header_dict.pop(key))) + return sorted_headers class AzureSigningError(ClientAuthenticationError): @@ -56,7 +125,6 @@ class AzureSigningError(ClientAuthenticationError): """ -# pylint: disable=no-self-use class SharedKeyCredentialPolicy(SansIOHTTPPolicy): def __init__(self, account_name, account_key): @@ -95,7 +163,7 @@ def _get_canonicalized_headers(request): for name, value in request.http_request.headers.items(): if name.startswith('x-ms-'): x_ms_headers.append((name.lower(), value)) - x_ms_headers.sort() + x_ms_headers = _storage_header_sort(x_ms_headers) for name, value in x_ms_headers: if value is not None: string_to_sign += ''.join([name, ':', value, '\n']) @@ -121,7 +189,7 @@ def _add_authorization_header(self, request, string_to_sign): except Exception as ex: # Wrap any error that occurred as signing error # Doing so will clarify/locate the source of problem - raise _wrap_exception(ex, AzureSigningError) + raise _wrap_exception(ex, AzureSigningError) from ex def on_request(self, request): string_to_sign = \ @@ -139,4 +207,39 @@ def on_request(self, request): self._get_canonicalized_resource_query(request) self._add_authorization_header(request, string_to_sign) - #logger.debug("String_to_sign=%s", string_to_sign) + # logger.debug("String_to_sign=%s", string_to_sign) + + +class StorageHttpChallenge(object): + def __init__(self, challenge): + """ Parses an HTTP WWW-Authentication Bearer challenge from the Storage service. 
""" + if not challenge: + raise ValueError("Challenge cannot be empty") + + self._parameters = {} + self.scheme, trimmed_challenge = challenge.strip().split(" ", 1) + + # name=value pairs either comma or space separated with values possibly being + # enclosed in quotes + for item in re.split('[, ]', trimmed_challenge): + comps = item.split("=") + if len(comps) == 2: + key = comps[0].strip(' "') + value = comps[1].strip(' "') + if key: + self._parameters[key] = value + + # Extract and verify required parameters + self.authorization_uri = self._parameters.get('authorization_uri') + if not self.authorization_uri: + raise ValueError("Authorization Uri not found") + + self.resource_id = self._parameters.get('resource_id') + if not self.resource_id: + raise ValueError("Resource id not found") + + uri_path = urlparse(self.authorization_uri).path.lstrip("/") + self.tenant_id = uri_path.split("/")[0] + + def get_value(self, key): + return self._parameters.get(key) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io.py index 93a5c134849a..3e46f1fb53fe 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Input/output utilities. @@ -52,7 +53,7 @@ class SchemaResolutionException(schema.AvroException): def __init__(self, fail_msg, writer_schema=None): pretty_writers = json.dumps(json.loads(str(writer_schema)), indent=2) if writer_schema: - fail_msg += "\nWriter's Schema: %s" % pretty_writers + fail_msg += f"\nWriter's Schema: {pretty_writers}" schema.AvroException.__init__(self, fail_msg) # ------------------------------------------------------------------------------ @@ -76,10 +77,9 @@ def reader(self): def read(self, n): """Read n bytes. - Args: - n: Number of bytes to read. - Returns: - The next n bytes from the input. + :param int n: Number of bytes to read. + :returns: The next n bytes from the input. 
+        :rtype: bytes
         """
         assert (n >= 0), n
         input_bytes = self.reader.read(n)
@@ -105,7 +105,7 @@ def read_boolean(self):
             return True
         if b == 0:
             return False
-        fail_msg = "Invalid value for boolean: %s" % b
+        fail_msg = f"Invalid value for boolean: {b}"
         raise schema.AvroException(fail_msg)
 
     def read_int(self):
@@ -253,7 +253,7 @@ def read_data(self, writer_schema, decoder):
         elif writer_schema.type in ['record', 'error', 'request']:
             result = self.read_record(writer_schema, decoder)
         else:
-            fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type
+            fail_msg = f"Cannot read unknown schema type: {writer_schema.type}"
             raise schema.AvroException(fail_msg)
         return result
@@ -290,33 +290,26 @@ def skip_data(self, writer_schema, decoder):
             self.skip_record(writer_schema, decoder)
             result = None
         else:
-            fail_msg = "Unknown schema type: %s" % writer_schema.type
+            fail_msg = f"Unknown schema type: {writer_schema.type}"
             raise schema.AvroException(fail_msg)
         return result
 
+    # Fixed instances are encoded using the number of bytes declared in the schema.
     @staticmethod
     def read_fixed(writer_schema, decoder):
-        """
-        Fixed instances are encoded using the number of bytes declared
-        in the schema.
-        """
         return decoder.read(writer_schema.size)
 
     @staticmethod
     def skip_fixed(writer_schema, decoder):
         return decoder.skip(writer_schema.size)
 
+    # An enum is encoded by an int, representing the zero-based position of the symbol in the schema.
     @staticmethod
     def read_enum(writer_schema, decoder):
-        """
-        An enum is encoded by a int, representing the zero-based position
-        of the symbol in the schema.
-        """
         # read data
         index_of_symbol = decoder.read_int()
         if index_of_symbol >= len(writer_schema.symbols):
-            fail_msg = "Can't access enum index %d for enum with %d symbols" \
-                       % (index_of_symbol, len(writer_schema.symbols))
+            fail_msg = f"Can't access enum index {index_of_symbol} for enum with {len(writer_schema.symbols)} symbols"
             raise SchemaResolutionException(fail_msg, writer_schema)
         read_symbol = writer_schema.symbols[index_of_symbol]
         return read_symbol
@@ -325,21 +318,15 @@ def read_enum(writer_schema, decoder):
     def skip_enum(decoder):
         return decoder.skip_int()
 
+    # Arrays are encoded as a series of blocks.
+
+    # Each block consists of a long count value, followed by that many array items.
+    # A block with count zero indicates the end of the array. Each item is encoded per the array's item schema.
+
+    # If a block's count is negative, then the count is followed immediately by a long block size,
+    # indicating the number of bytes in the block.
+    # The actual count in this case is the absolute value of the count written.
     def read_array(self, writer_schema, decoder):
-        """
-        Arrays are encoded as a series of blocks.
-
-        Each block consists of a long count value,
-        followed by that many array items.
-        A block with count zero indicates the end of the array.
-        Each item is encoded per the array's item schema.
-
-        If a block's count is negative,
-        then the count is followed immediately by a long block size,
-        indicating the number of bytes in the block.
-        The actual count in this case
-        is the absolute value of the count written.
-        """
         read_items = []
         block_count = decoder.read_long()
         while block_count != 0:
@@ -362,21 +349,15 @@ def skip_array(self, writer_schema, decoder):
             self.skip_data(writer_schema.items, decoder)
             block_count = decoder.read_long()
 
+    # Maps are encoded as a series of blocks.
+
+    # Each block consists of a long count value, followed by that many key/value pairs.
+ # A block with count zero indicates the end of the map. Each item is encoded per the map's value schema. + + # If a block's count is negative, then the count is followed immediately by a long block size, + # indicating the number of bytes in the block. + # The actual count in this case is the absolute value of the count written. def read_map(self, writer_schema, decoder): - """ - Maps are encoded as a series of blocks. - - Each block consists of a long count value, - followed by that many key/value pairs. - A block with count zero indicates the end of the map. - Each item is encoded per the map's value schema. - - If a block's count is negative, - then the count is followed immediately by a long block size, - indicating the number of bytes in the block. - The actual count in this case - is the absolute value of the count written. - """ read_items = {} block_count = decoder.read_long() while block_count != 0: @@ -401,17 +382,15 @@ def skip_map(self, writer_schema, decoder): self.skip_data(writer_schema.values, decoder) block_count = decoder.read_long() + # A union is encoded by first writing a long value indicating + # the zero-based position within the union of the schema of its value. + # The value is then encoded per the indicated schema within the union. def read_union(self, writer_schema, decoder): - """ - A union is encoded by first writing a long value indicating - the zero-based position within the union of the schema of its value. - The value is then encoded per the indicated schema within the union. - """ # schema resolution index_of_schema = int(decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) selected_writer_schema = writer_schema.schemas[index_of_schema] @@ -421,31 +400,29 @@ def read_union(self, writer_schema, decoder): def skip_union(self, writer_schema, decoder): index_of_schema = int(decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) return self.skip_data(writer_schema.schemas[index_of_schema], decoder) + # A record is encoded by encoding the values of its fields + # in the order that they are declared. In other words, a record + # is encoded as just the concatenation of the encodings of its fields. + # Field values are encoded per their schema. + + # Schema Resolution: + # * the ordering of fields may be different: fields are matched by name. + # * schemas for fields with the same name in both records are resolved + # recursively. + # * if the writer's record contains a field with a name not present in the + # reader's record, the writer's value for that field is ignored. + # * if the reader's record schema has a field that contains a default value, + # and writer's schema does not have a field with the same name, then the + # reader should use the default value from its field. 
+ # * if the reader's record schema has a field with no default value, and + # writer's schema does not have a field with the same name, then the + # field's value is unset. def read_record(self, writer_schema, decoder): - """ - A record is encoded by encoding the values of its fields - in the order that they are declared. In other words, a record - is encoded as just the concatenation of the encodings of its fields. - Field values are encoded per their schema. - - Schema Resolution: - * the ordering of fields may be different: fields are matched by name. - * schemas for fields with the same name in both records are resolved - recursively. - * if the writer's record contains a field with a name not present in the - reader's record, the writer's value for that field is ignored. - * if the reader's record schema has a field that contains a default value, - and writer's schema does not have a field with the same name, then the - reader should use the default value from its field. - * if the reader's record schema has a field with no default value, and - writer's schema does not have a field with the same name, then the - field's value is unset. - """ # schema resolution read_record = {} for field in writer_schema.fields: @@ -456,9 +433,3 @@ def read_record(self, writer_schema, decoder): def skip_record(self, writer_schema, decoder): for field in writer_schema.fields: self.skip_data(field.type, decoder) - - -# ------------------------------------------------------------------------------ - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io_async.py index e9812163795f..8688661b5add 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/avro_io_async.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Input/output utilities. @@ -59,10 +60,9 @@ def reader(self): async def read(self, n): """Read n bytes. - Args: - n: Number of bytes to read. - Returns: - The next n bytes from the input. + :param int n: Number of bytes to read. + :returns: The next n bytes from the input. 
+        :rtype: bytes
         """
         assert (n >= 0), n
         input_bytes = await self.reader.read(n)
@@ -88,7 +88,7 @@ async def read_boolean(self):
             return True
         if b == 0:
             return False
-        fail_msg = "Invalid value for boolean: %s" % b
+        fail_msg = f"Invalid value for boolean: {b}"
         raise schema.AvroException(fail_msg)
 
     async def read_int(self):
@@ -237,7 +237,7 @@ async def read_data(self, writer_schema, decoder):
         elif writer_schema.type in ['record', 'error', 'request']:
             result = await self.read_record(writer_schema, decoder)
         else:
-            fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type
+            fail_msg = f"Cannot read unknown schema type: {writer_schema.type}"
             raise schema.AvroException(fail_msg)
         return result
@@ -274,33 +274,26 @@ async def skip_data(self, writer_schema, decoder):
             await self.skip_record(writer_schema, decoder)
             result = None
         else:
-            fail_msg = "Unknown schema type: %s" % writer_schema.type
+            fail_msg = f"Unknown schema type: {writer_schema.type}"
             raise schema.AvroException(fail_msg)
         return result
 
+    # Fixed instances are encoded using the number of bytes declared in the schema.
     @staticmethod
     async def read_fixed(writer_schema, decoder):
-        """
-        Fixed instances are encoded using the number of bytes declared
-        in the schema.
-        """
         return await decoder.read(writer_schema.size)
 
     @staticmethod
     async def skip_fixed(writer_schema, decoder):
         return await decoder.skip(writer_schema.size)
 
+    # An enum is encoded by an int, representing the zero-based position of the symbol in the schema.
     @staticmethod
     async def read_enum(writer_schema, decoder):
-        """
-        An enum is encoded by a int, representing the zero-based position
-        of the symbol in the schema.
-        """
         # read data
         index_of_symbol = await decoder.read_int()
         if index_of_symbol >= len(writer_schema.symbols):
-            fail_msg = "Can't access enum index %d for enum with %d symbols" \
-                       % (index_of_symbol, len(writer_schema.symbols))
+            fail_msg = f"Can't access enum index {index_of_symbol} for enum with {len(writer_schema.symbols)} symbols"
            raise SchemaResolutionException(fail_msg, writer_schema)
         read_symbol = writer_schema.symbols[index_of_symbol]
         return read_symbol
@@ -309,21 +302,15 @@ async def read_enum(writer_schema, decoder):
     async def skip_enum(decoder):
         return await decoder.skip_int()
 
+    # Arrays are encoded as a series of blocks.
+
+    # Each block consists of a long count value, followed by that many array items.
+    # A block with count zero indicates the end of the array. Each item is encoded per the array's item schema.
+
+    # If a block's count is negative, then the count is followed immediately by a long block size,
+    # indicating the number of bytes in the block.
+    # The actual count in this case is the absolute value of the count written.
     async def read_array(self, writer_schema, decoder):
-        """
-        Arrays are encoded as a series of blocks.
-
-        Each block consists of a long count value,
-        followed by that many array items.
-        A block with count zero indicates the end of the array.
-        Each item is encoded per the array's item schema.
-
-        If a block's count is negative,
-        then the count is followed immediately by a long block size,
-        indicating the number of bytes in the block.
-        The actual count in this case
-        is the absolute value of the count written.
-        """
         read_items = []
         block_count = await decoder.read_long()
         while block_count != 0:
@@ -346,21 +333,15 @@ async def skip_array(self, writer_schema, decoder):
             await self.skip_data(writer_schema.items, decoder)
             block_count = await decoder.read_long()
 
+    # Maps are encoded as a series of blocks.
+ + # Each block consists of a long count value, followed by that many key/value pairs. + # A block with count zero indicates the end of the map. Each item is encoded per the map's value schema. + + # If a block's count is negative, then the count is followed immediately by a long block size, + # indicating the number of bytes in the block. + # The actual count in this case is the absolute value of the count written. async def read_map(self, writer_schema, decoder): - """ - Maps are encoded as a series of blocks. - - Each block consists of a long count value, - followed by that many key/value pairs. - A block with count zero indicates the end of the map. - Each item is encoded per the map's value schema. - - If a block's count is negative, - then the count is followed immediately by a long block size, - indicating the number of bytes in the block. - The actual count in this case - is the absolute value of the count written. - """ read_items = {} block_count = await decoder.read_long() while block_count != 0: @@ -385,17 +366,15 @@ async def skip_map(self, writer_schema, decoder): await self.skip_data(writer_schema.values, decoder) block_count = await decoder.read_long() + # A union is encoded by first writing a long value indicating + # the zero-based position within the union of the schema of its value. + # The value is then encoded per the indicated schema within the union. async def read_union(self, writer_schema, decoder): - """ - A union is encoded by first writing a long value indicating - the zero-based position within the union of the schema of its value. - The value is then encoded per the indicated schema within the union. - """ # schema resolution index_of_schema = int(await decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) selected_writer_schema = writer_schema.schemas[index_of_schema] @@ -405,31 +384,29 @@ async def read_union(self, writer_schema, decoder): async def skip_union(self, writer_schema, decoder): index_of_schema = int(await decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) return await self.skip_data(writer_schema.schemas[index_of_schema], decoder) + # A record is encoded by encoding the values of its fields + # in the order that they are declared. In other words, a record + # is encoded as just the concatenation of the encodings of its fields. + # Field values are encoded per their schema. + + # Schema Resolution: + # * the ordering of fields may be different: fields are matched by name. + # * schemas for fields with the same name in both records are resolved + # recursively. + # * if the writer's record contains a field with a name not present in the + # reader's record, the writer's value for that field is ignored. + # * if the reader's record schema has a field that contains a default value, + # and writer's schema does not have a field with the same name, then the + # reader should use the default value from its field. 
+ # * if the reader's record schema has a field with no default value, and + # writer's schema does not have a field with the same name, then the + # field's value is unset. async def read_record(self, writer_schema, decoder): - """ - A record is encoded by encoding the values of its fields - in the order that they are declared. In other words, a record - is encoded as just the concatenation of the encodings of its fields. - Field values are encoded per their schema. - - Schema Resolution: - * the ordering of fields may be different: fields are matched by name. - * schemas for fields with the same name in both records are resolved - recursively. - * if the writer's record contains a field with a name not present in the - reader's record, the writer's value for that field is ignored. - * if the reader's record schema has a field that contains a default value, - and writer's schema does not have a field with the same name, then the - reader should use the default value from its field. - * if the reader's record schema has a field with no default value, and - writer's schema does not have a field with the same name, then the - field's value is unset. - """ # schema resolution read_record = {} for field in writer_schema.fields: @@ -440,9 +417,3 @@ async def read_record(self, writer_schema, decoder): async def skip_record(self, writer_schema, decoder): for field in writer_schema.fields: await self.skip_data(field.type, decoder) - - -# ------------------------------------------------------------------------------ - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile.py index df06fe0cfe76..757e0329cd07 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Read/Write Avro File Object Containers.""" @@ -102,7 +103,7 @@ def __init__(self, reader, datum_reader, **kwargs): else: self.codec = avro_codec_raw.decode('utf-8') if self.codec not in VALID_CODECS: - raise DataFileException('Unknown codec: %s.' % self.codec) + raise DataFileException(f"Unknown codec: {self.codec}.") # get ready to read self._block_count = 0 @@ -166,10 +167,9 @@ def block_count(self): def get_meta(self, key): """Reports the value of a given metadata key. - Args: - key: Metadata key (string) to report the value of. - Returns: - Value associated to the metadata key, as bytes. + :param str key: Metadata key to report the value of. + :returns: Value associated to the metadata key, as bytes. + :rtype: bytes """ return self._meta.get(key) @@ -185,8 +185,7 @@ def _read_header(self): # check magic number if header.get('magic') != MAGIC: - fail_msg = "Not an Avro data file: %s doesn't match %s." 
\ - % (header.get('magic'), MAGIC) + fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}." raise schema.AvroException(fail_msg) # set metadata @@ -210,7 +209,7 @@ def _read_block_header(self): uncompressed = zlib.decompress(data, -15) self._datum_decoder = avro_io.BinaryDecoder(io.BytesIO(uncompressed)) else: - raise DataFileException("Unknown codec: %r" % self.codec) + raise DataFileException(f"Unknown codec: {self.codec!r}") def _skip_sync(self): """ @@ -253,14 +252,6 @@ def __next__(self): return datum - # PY2 - def next(self): - return self.__next__() - def close(self): """Close this reader.""" self.reader.close() - - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile_async.py index 1e9d018228de..85dc5cb582b3 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/datafile_async.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Read/Write Avro File Object Containers.""" @@ -64,7 +65,7 @@ async def init(self): else: self.codec = avro_codec_raw.decode('utf-8') if self.codec not in VALID_CODECS: - raise DataFileException('Unknown codec: %s.' % self.codec) + raise DataFileException(f"Unknown codec: {self.codec}.") # get ready to read self._block_count = 0 @@ -127,10 +128,9 @@ def block_count(self): def get_meta(self, key): """Reports the value of a given metadata key. - Args: - key: Metadata key (string) to report the value of. - Returns: - Value associated to the metadata key, as bytes. + :param str key: Metadata key to report the value of. + :returns: Value associated to the metadata key, as bytes. + :rtype: bytes """ return self._meta.get(key) @@ -146,8 +146,7 @@ async def _read_header(self): # check magic number if header.get('magic') != MAGIC: - fail_msg = "Not an Avro data file: %s doesn't match %s." \ - % (header.get('magic'), MAGIC) + fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}." 
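For context, the header check above enforces the Avro object container layout: a file begins with four magic bytes (b'Obj' plus a version byte of 1), followed by a metadata map carrying avro.schema and avro.codec, and a 16-byte sync marker. These vendored readers accept only the 'null' and 'deflate' codecs, and Avro 'deflate' blocks are raw DEFLATE streams with no zlib header, which is why _read_block_header calls zlib.decompress(data, -15). A minimal standalone sketch of the same checks (illustrative helper names, not the vendored API):

    import zlib

    MAGIC = b'Obj' + bytes([1])          # container magic: 'Obj' + format version 1
    VALID_CODECS = frozenset(['null', 'deflate'])

    def validate_header(magic: bytes, codec: str) -> None:
        # Mirrors the two failure modes raised by the readers above.
        if magic != MAGIC:
            raise ValueError(f"Not an Avro data file: {magic!r} doesn't match {MAGIC!r}.")
        if codec not in VALID_CODECS:
            raise ValueError(f"Unknown codec: {codec}.")

    def inflate_block(data: bytes) -> bytes:
        # wbits=-15 selects a raw DEFLATE stream (no zlib header/trailer),
        # matching how Avro writes 'deflate'-compressed blocks.
        return zlib.decompress(data, -15)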
raise schema.AvroException(fail_msg) # set metadata @@ -163,7 +162,7 @@ async def _read_block_header(self): await self.raw_decoder.skip_long() self._datum_decoder = self._raw_decoder else: - raise DataFileException("Unknown codec: %r" % self.codec) + raise DataFileException(f"Unknown codec: {self.codec!r}") async def _skip_sync(self): """ @@ -209,7 +208,3 @@ async def __anext__(self): def close(self): """Close this reader.""" self.reader.close() - - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/schema.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/schema.py index d496f6457568..d5484abcdd9d 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/schema.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/avro/schema.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=too-many-lines +# pylint: disable=docstring-missing-return, docstring-missing-rtype, too-many-lines """Representation of Avro schemas. @@ -25,11 +25,10 @@ - Null. """ -from abc import ABCMeta, abstractmethod +import abc import json import logging import re - logger = logging.getLogger(__name__) # ------------------------------------------------------------------------------ @@ -131,7 +130,7 @@ class SchemaParseException(AvroException): """Error while parsing a JSON schema descriptor.""" -class Schema(metaclass=ABCMeta): +class Schema(metaclass=abc.ABCMeta): """Abstract base class for all Schema classes.""" def __init__(self, data_type, other_props=None): @@ -142,7 +141,7 @@ def __init__(self, data_type, other_props=None): other_props: Optional dictionary of additional properties. """ if data_type not in VALID_TYPES: - raise SchemaParseException('%r is not a valid Avro type.' % data_type) + raise SchemaParseException(f'{data_type!r} is not a valid Avro type.') # All properties of this schema, as a map: property name -> property value self._props = {} @@ -189,15 +188,13 @@ def __str__(self): """Returns: the JSON representation of this schema.""" return json.dumps(self.to_json(names=None)) - @abstractmethod - def to_json(self, names): - """Converts the schema object into its AVRO specification representation. + # Converts the schema object into its AVRO specification representation. - Schema types that have names (records, enums, and fixed) must - be aware of not re-defining schemas that are already listed - in the parameter names. - """ - raise Exception('Cannot run abstract method.') + # Schema types that have names (records, enums, and fixed) must be aware of not + # re-defining schemas that are already listed in the parameter names. + @abc.abstractmethod + def to_json(self, names): + ... # ------------------------------------------------------------------------------ @@ -234,7 +231,7 @@ def __init__(self, name, namespace=None): match = _RE_FULL_NAME.match(self._fullname) if match is None: raise SchemaParseException( - 'Invalid absolute schema name: %r.' 
% self._fullname) + f'Invalid absolute schema name: {self._fullname!r}.') self._name = match.group(1) self._namespace = self._fullname[:-(len(self._name) + 1)] @@ -245,13 +242,12 @@ def __init__(self, name, namespace=None): self._namespace = namespace self._fullname = (self._name if (not self._namespace) else - '%s.%s' % (self._namespace, self._name)) + f'{self._namespace}.{self._name}') # Validate the fullname: if _RE_FULL_NAME.match(self._fullname) is None: - raise SchemaParseException( - 'Invalid schema name %r infered from name %r and namespace %r.' - % (self._fullname, self._name, self._namespace)) + raise SchemaParseException(f"Invalid schema name {self._fullname!r} inferred from " + f"name {self._name!r} and namespace {self._namespace!r}.") def __eq__(self, other): if not isinstance(other, Name): @@ -277,7 +273,7 @@ def fullname(self): # ------------------------------------------------------------------------------ -class Names: +class Names(object): """Tracks Avro named schemas and default namespace during parsing.""" def __init__(self, default_namespace=None, names=None): @@ -305,21 +301,19 @@ def default_namespace(self): def new_with_default_namespace(self, namespace): """Creates a new name tracker from this tracker, but with a new default ns. - Args: - namespace: New default namespace to use. - Returns: - New name tracker with the specified default namespace. + :param Any namespace: New default namespace to use. + :returns: New name tracker with the specified default namespace. + :rtype: Names """ return Names(names=self._names, default_namespace=namespace) def get_name(self, name, namespace=None): """Resolves the Avro name according to this name tracker's state. - Args: - name: Name to resolve (absolute or relative). - namespace: Optional explicit namespace. - Returns: - The specified name, resolved according to this tracker. + :param Any name: Name to resolve (absolute or relative). + :param Optional[Any] namespace: Optional explicit namespace. + :returns: The specified name, resolved according to this tracker. + :rtype: Name """ if namespace is None: namespace = self._default_namespace @@ -328,19 +322,16 @@ def get_name(self, name, namespace=None): def get_schema(self, name, namespace=None): """Resolves an Avro schema by name. - Args: - name: Name (relative or absolute) of the Avro schema to look up. - namespace: Optional explicit namespace. - Returns: - The schema with the specified name, if any, or None. + :param Any name: Name (absolute or relative) of the Avro schema to look up. + :param Optional[Any] namespace: Optional explicit namespace. + :returns: The schema with the specified name, if any, or None + :rtype: Union[Any, None] """ avro_name = self.get_name(name=name, namespace=namespace) return self._names.get(avro_name.fullname, None) + # Given a properties, return properties with namespace removed if it matches the own default namespace def prune_namespace(self, properties): - """given a properties, return properties with namespace removed if - it matches the own default namespace - """ if self.default_namespace is None: # I have no default -- no change return properties @@ -358,15 +349,14 @@ def prune_namespace(self, properties): def register(self, schema): """Registers a new named schema in this tracker. - Args: - schema: Named Avro schema to register in this tracker. + :param Any schema: Named Avro schema to register in this tracker. """ if schema.fullname in VALID_TYPES: raise SchemaParseException( - '%s is a reserved type name.' 
% schema.fullname) + f'{schema.fullname} is a reserved type name.') if schema.fullname in self.names: raise SchemaParseException( - 'Avro name %r already exists.' % schema.fullname) + f'Avro name {schema.fullname!r} already exists.') logger.log(DEBUG_VERBOSE, 'Register new name for %r', schema.fullname) self._names[schema.fullname] = schema @@ -398,7 +388,7 @@ def __init__( names: Tracker to resolve and register Avro names. other_props: Optional map of additional properties of the schema. """ - assert (data_type in NAMED_TYPES), ('Invalid named type: %r' % data_type) + assert (data_type in NAMED_TYPES), (f'Invalid named type: {data_type!r}') self._avro_name = names.get_name(name=name, namespace=namespace) super(NamedSchema, self).__init__(data_type, other_props) @@ -429,24 +419,21 @@ def fullname(self): def name_ref(self, names): """Reports this schema name relative to the specified name tracker. - Args: - names: Avro name tracker to relativise this schema name against. - Returns: - This schema name, relativised against the specified name tracker. + :param Any names: Avro name tracker to relativize this schema name against. + :returns: This schema name, relativized against the specified name tracker. + :rtype: Any """ if self.namespace == names.default_namespace: return self.name return self.fullname - @abstractmethod - def to_json(self, names): - """Converts the schema object into its AVRO specification representation. + # Converts the schema object into its AVRO specification representation. - Schema types that have names (records, enums, and fixed) must - be aware of not re-defining schemas that are already listed - in the parameter names. - """ - raise Exception('Cannot run abstract method.') + # Schema types that have names (records, enums, and fixed) must be aware + # of not re-defining schemas that are already listed in the parameter names. + @abc.abstractmethod + def to_json(self, names): + ... # ------------------------------------------------------------------------------ @@ -481,9 +468,9 @@ def __init__( other_props: """ if (not isinstance(name, str)) or (not name): - raise SchemaParseException('Invalid record field name: %r.' % name) + raise SchemaParseException(f'Invalid record field name: {name!r}.') if (order is not None) and (order not in VALID_FIELD_SORT_ORDERS): - raise SchemaParseException('Invalid record field order: %r.' % order) + raise SchemaParseException(f'Invalid record field order: {order!r}.') # All properties of this record field: self._props = {} @@ -576,7 +563,7 @@ def __init__(self, data_type, other_props=None): data_type: Type of the schema to construct. Must be primitive. """ if data_type not in PRIMITIVE_TYPES: - raise AvroException('%r is not a valid primitive type.' % data_type) + raise AvroException(f'{data_type!r} is not a valid primitive type.') super(PrimitiveSchema, self).__init__(data_type, other_props=other_props) @property @@ -672,7 +659,7 @@ def __init__( if (len(symbol_set) != len(symbols) or not all(map(lambda symbol: isinstance(symbol, str), symbols))): raise AvroException( - 'Invalid symbols for enum schema: %r.' 
% (symbols,)) + f'Invalid symbols for enum schema: {symbols!r}.') super(EnumSchema, self).__init__( data_type=ENUM, @@ -801,22 +788,19 @@ def __init__(self, schemas): filter(lambda schema: schema.type in NAMED_TYPES, self._schemas)) unique_names = frozenset(map(lambda schema: schema.fullname, named_branches)) if len(unique_names) != len(named_branches): - raise AvroException( - 'Invalid union branches with duplicate schema name:%s' - % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) + raise AvroException(f'Invalid union branches with duplicate schema name:{schemas}') # Types are unique within unnamed schemas, and union is not allowed: unnamed_branches = tuple( filter(lambda schema: schema.type not in NAMED_TYPES, self._schemas)) unique_types = frozenset(map(lambda schema: schema.type, unnamed_branches)) if UNION in unique_types: - raise AvroException( - 'Invalid union branches contain other unions:%s' - % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) + raise AvroException(f'Invalid union branches contain other unions:{schemas}') if len(unique_types) != len(unnamed_branches): - raise AvroException( - 'Invalid union branches with duplicate type:%s' - % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) + raise AvroException(f'Invalid union branches with duplicate type:{schemas}') @property def schemas(self): @@ -874,11 +858,11 @@ class RecordSchema(NamedSchema): def _make_field(index, field_desc, names): """Builds field schemas from a list of field JSON descriptors. - Args: - index: 0-based index of the field in the record. - field_desc: JSON descriptors of a record field. - Return: - The field schema. + :param int index: 0-based index of the field in the record. + :param Any field_desc: JSON descriptors of a record field. + :param Any names: The names for this schema. + :returns: The field schema. + :rtype: Field """ field_schema = schema_from_json_data( json_data=field_desc['type'], @@ -900,14 +884,12 @@ def _make_field(index, field_desc, names): @staticmethod def make_field_list(field_desc_list, names): """Builds field schemas from a list of field JSON descriptors. - Guarantees field name unicity. - Args: - field_desc_list: collection of field JSON descriptors. - names: Avro schema tracker. - Yields - Field schemas. + :param Any field_desc_list: Collection of field JSON descriptors. + :param Any names: The names for this schema. + :returns: Field schemas. + :rtype: Field """ for index, field_desc in enumerate(field_desc_list): yield RecordSchema._make_field(index, field_desc, names) @@ -915,19 +897,17 @@ def make_field_list(field_desc_list, names): @staticmethod def _make_field_map(fields): """Builds the field map. - Guarantees field name unicity. - Args: - fields: iterable of field schema. - Returns: - A map of field schemas, indexed by name. + :param Any fields: Iterable of field schema. + :returns: A map of field schemas, indexed by name. + :rtype: Dict[Any, Any] """ field_map = {} for field in fields: if field.name in field_map: raise SchemaParseException( - 'Duplicate record field name %r.' % field.name) + f'Duplicate record field name {field.name!r}.') field_map[field.name] = field return field_map @@ -975,8 +955,9 @@ def __init__( ) else: raise SchemaParseException( - 'Invalid record type: %r.' 
% record_type) + f'Invalid record type: {record_type!r}.') + nested_names = [] if record_type in [RECORD, ERROR]: avro_name = names.get_name(name=name, namespace=namespace) nested_names = names.new_with_default_namespace(namespace=avro_name.namespace) @@ -1032,14 +1013,12 @@ def __eq__(self, that): def filter_keys_out(items, keys): """Filters a collection of (key, value) items. - Exclude any item whose key belongs to keys. - Args: - items: Dictionary of items to filter the keys out of. - keys: Keys to filter out. - Yields: - Filtered items. + :param Dict[Any, Any] items: Dictionary of items to filter the keys out of. + :param Dict[Any, Any] keys: Dictionary of keys to filter the extracted keys against. + :returns: Filtered items. + :rtype: Tuple(Any, Any) """ for key, value in items.items(): if key in keys: @@ -1057,9 +1036,7 @@ def _schema_from_json_string(json_string, names): # Look for a known named schema: schema = names.get_schema(name=json_string) if schema is None: - raise SchemaParseException( - 'Unknown named schema %r, known names: %r.' - % (json_string, sorted(names.names))) + raise SchemaParseException(f"Unknown named schema {json_string!r}, known names: {sorted(names.names)!r}.") return schema @@ -1074,7 +1051,7 @@ def _schema_from_json_object(json_object, names): data_type = json_object.get('type') if data_type is None: raise SchemaParseException( - 'Avro schema JSON descriptor has no "type" property: %r' % json_object) + f'Avro schema JSON descriptor has no "type" property: {json_object!r}') other_props = dict( filter_keys_out(items=json_object, keys=SCHEMA_RESERVED_PROPS)) @@ -1110,7 +1087,7 @@ def MakeFields(names): other_props=other_props, ) else: - raise Exception('Internal error: unknown type %r.' % data_type) + raise ValueError(f'Internal error: unknown type {data_type!r}.') elif data_type in VALID_TYPES: # Unnamed, non-primitive Avro type: @@ -1118,9 +1095,7 @@ def MakeFields(names): if data_type == ARRAY: items_desc = json_object.get('items') if items_desc is None: - raise SchemaParseException( - 'Invalid array schema descriptor with no "items" : %r.' - % json_object) + raise SchemaParseException(f'Invalid array schema descriptor with no "items" : {json_object!r}.') result = ArraySchema( items=schema_from_json_data(items_desc, names), other_props=other_props, @@ -1129,9 +1104,7 @@ def MakeFields(names): elif data_type == MAP: values_desc = json_object.get('values') if values_desc is None: - raise SchemaParseException( - 'Invalid map schema descriptor with no "values" : %r.' - % json_object) + raise SchemaParseException(f'Invalid map schema descriptor with no "values" : {json_object!r}.') result = MapSchema( values=schema_from_json_data(values_desc, names=names), other_props=other_props, @@ -1146,10 +1119,9 @@ def MakeFields(names): result = ErrorUnionSchema(schemas=error_schemas) else: - raise Exception('Internal error: unknown type %r.' % data_type) + raise ValueError(f'Internal error: unknown type {data_type!r}.') else: - raise SchemaParseException( - 'Invalid JSON descriptor for an Avro schema: %r' % json_object) + raise SchemaParseException(f'Invalid JSON descriptor for an Avro schema: {json_object!r}') return result @@ -1163,14 +1135,12 @@ def MakeFields(names): def schema_from_json_data(json_data, names=None): """Builds an Avro Schema from its JSON descriptor. + Raises SchemaParseException if the descriptor is invalid. - Args: - json_data: JSON data representing the descriptor of the Avro schema. - names: Optional tracker for Avro named schemas. 
- Returns: - The Avro schema parsed from the JSON descriptor. - Raises: - SchemaParseException: if the descriptor is invalid. + :param Any json_data: JSON data representing the descriptor of the Avro schema. + :param Any names: Optional tracker for Avro named schemas. + :returns: The Avro schema parsed from the JSON descriptor. + :rtype: Any """ if names is None: names = Names() @@ -1179,7 +1149,7 @@ def schema_from_json_data(json_data, names=None): parser = _JSONDataParserTypeMap.get(type(json_data)) if parser is None: raise SchemaParseException( - 'Invalid JSON descriptor for an Avro schema: %r.' % json_data) + f'Invalid JSON descriptor for an Avro schema: {json_data!r}.') return parser(json_data, names=names) @@ -1188,22 +1158,18 @@ def schema_from_json_data(json_data, names=None): def parse(json_string): """Constructs a Schema from its JSON descriptor in text form. + Raises SchemaParseException if a JSON parsing error is met, or if the JSON descriptor is invalid. - Args: - json_string: String representation of the JSON descriptor of the schema. - Returns: - The parsed schema. - Raises: - SchemaParseException: on JSON parsing error, - or if the JSON descriptor is invalid. + :param str json_string: String representation of the JSON descriptor of the schema. + :returns: The parsed schema. + :rtype: Any """ try: json_data = json.loads(json_string) except Exception as exn: raise SchemaParseException( - 'Error parsing schema from JSON: %r. ' - 'Error message: %r.' - % (json_string, exn)) + f'Error parsing schema from JSON: {json_string!r}. ' + f'Error message: {exn!r}.') from exn # Initialize the names object names = Names() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client.py index 136284bd0ef7..9dc8d2ec3cc4 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client.py @@ -3,92 +3,90 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - -from typing import ( # pylint: disable=unused-import - Union, - Optional, +import logging +import uuid +from typing import ( Any, - Iterable, + cast, Dict, - List, - Type, + Iterator, + Optional, Tuple, TYPE_CHECKING, + Union, ) -import logging - -try: - from urllib.parse import parse_qs, quote -except ImportError: - from urlparse import parse_qs # type: ignore - from urllib2 import quote # type: ignore - -import six +from urllib.parse import parse_qs, quote -from azure.core.configuration import Configuration -from azure.core.credentials import AzureSasCredential +from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential, TokenCredential from azure.core.exceptions import HttpResponseError from azure.core.pipeline import Pipeline -from azure.core.pipeline.transport import RequestsTransport, HttpTransport +from azure.core.pipeline.transport import HttpTransport, RequestsTransport # pylint: disable=non-abstract-transport-import, no-name-in-module from azure.core.pipeline.policies import ( - RedirectPolicy, + AzureSasCredentialPolicy, ContentDecodePolicy, - BearerTokenCredentialPolicy, - ProxyPolicy, DistributedTracingPolicy, HttpLoggingPolicy, + ProxyPolicy, + RedirectPolicy, UserAgentPolicy, - AzureSasCredentialPolicy ) -from .constants import STORAGE_OAUTH_SCOPE, SERVICE_HOST_BASE, CONNECTION_TIMEOUT, READ_TIMEOUT -from .models import LocationMode from .authentication import SharedKeyCredentialPolicy -from .shared_access_signature import QueryStringConstants +from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE +from .models import LocationMode, StorageConfiguration from .policies import ( - StorageHeadersPolicy, + ExponentialRetry, + QueueMessagePolicy, + StorageBearerTokenCredentialPolicy, StorageContentValidation, + StorageHeadersPolicy, + StorageHosts, + StorageLoggingPolicy, StorageRequestHook, StorageResponseHook, - StorageLoggingPolicy, - StorageHosts, - QueueMessagePolicy, - ExponentialRetry, ) +from .request_handlers import serialize_batch_body, _get_batch_request_delimiter +from .response_handlers import PartialBatchErrorException, process_storage_error +from .shared_access_signature import QueryStringConstants from .._version import VERSION -from .response_handlers import process_storage_error, PartialBatchErrorException +from .._shared_access_signature import _is_credential_sastoken +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse # pylint: disable=C4756 _LOGGER = logging.getLogger(__name__) _SERVICE_PARAMS = { - "blob": {"primary": "BlobEndpoint", "secondary": "BlobSecondaryEndpoint"}, - "queue": {"primary": "QueueEndpoint", "secondary": "QueueSecondaryEndpoint"}, - "file": {"primary": "FileEndpoint", "secondary": "FileSecondaryEndpoint"}, - "dfs": {"primary": "BlobEndpoint", "secondary": "BlobEndpoint"}, + "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"}, + "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"}, + "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, + "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, } -class StorageAccountHostsMixin(object): # pylint: disable=too-many-instance-attributes +class StorageAccountHostsMixin(object): + _client: Any def __init__( self, - parsed_url, # type: Any - service, # type: str - 
credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None + parsed_url: Any, + service: str, + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY) self._hosts = kwargs.get("_hosts") self.scheme = parsed_url.scheme + self._is_localhost = False if service not in ["blob", "queue", "file-share", "dfs"]: - raise ValueError("Invalid service: {}".format(service)) + raise ValueError(f"Invalid service: {service}") service_name = service.split('-')[0] - account = parsed_url.netloc.split(".{}.core.".format(service_name)) + account = parsed_url.netloc.split(f".{service_name}.core.") self.account_name = account[0] if len(account) > 1 else None if not self.account_name and parsed_url.netloc.startswith("localhost") \ or parsed_url.netloc.startswith("127.0.0.1"): + self._is_localhost = True self.account_name = parsed_url.path.strip("/") self.credential = _format_shared_key_credential(self.account_name, credential) @@ -98,8 +96,7 @@ def __init__( secondary_hostname = None if hasattr(self.credential, "account_name"): self.account_name = self.credential.account_name - secondary_hostname = "{}-secondary.{}.{}".format( - self.credential.account_name, service_name, SERVICE_HOST_BASE) + secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" if not self._hosts: if len(account) > 1: @@ -109,10 +106,8 @@ def __init__( primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip('/') self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} - self.require_encryption = kwargs.get("require_encryption", False) - self.key_encryption_key = kwargs.get("key_encryption_key") - self.key_resolver_function = kwargs.get("key_resolver_function") - self._config, self._pipeline = self._create_pipeline(self.credential, storage_sdk=service, **kwargs) + self._sdk_moniker = f"storage-{service}/{VERSION}" + self._config, self._pipeline = self._create_pipeline(self.credential, sdk_moniker=self._sdk_moniker, **kwargs) def __enter__(self): self._client.__enter__() @@ -133,6 +128,8 @@ def url(self): This could be either the primary endpoint, or the secondary endpoint depending on the current :func:`location_mode`. + :returns: The full endpoint URL to this entity, including SAS token if used. + :rtype: str """ return self._format_url(self._hosts[self._location_mode]) @@ -140,7 +137,7 @@ def url(self): def primary_endpoint(self): """The full primary endpoint URL. - :type: str + :rtype: str """ return self._format_url(self._hosts[LocationMode.PRIMARY]) @@ -148,7 +145,7 @@ def primary_endpoint(self): def primary_hostname(self): """The hostname of the primary endpoint. - :type: str + :rtype: str """ return self._hosts[LocationMode.PRIMARY] @@ -159,7 +156,7 @@ def secondary_endpoint(self): If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional `secondary_hostname` keyword argument on instantiation. - :type: str + :rtype: str :raise ValueError: """ if not self._hosts[LocationMode.SECONDARY]: @@ -173,7 +170,7 @@ def secondary_hostname(self): If not available this will be None. To explicitly specify a secondary hostname, use the optional `secondary_hostname` keyword argument on instantiation. 
- :type: str or None + :rtype: Optional[str] """ return self._hosts[LocationMode.SECONDARY] @@ -183,7 +180,7 @@ def location_mode(self): By default this will be "primary". Options include "primary" and "secondary". - :type: str + :rtype: str """ return self._location_mode @@ -194,52 +191,64 @@ def location_mode(self, value): self._location_mode = value self._client._config.url = self.url # pylint: disable=protected-access else: - raise ValueError("No host URL for location mode: {}".format(value)) + raise ValueError(f"No host URL for location mode: {value}") @property def api_version(self): """The version of the Storage API used for requests. - :type: str + :rtype: str """ return self._client._config.version # pylint: disable=protected-access - def _format_query_string(self, sas_token, credential, snapshot=None, share_snapshot=None): + def _format_query_string( + self, sas_token: Optional[str], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]], # pylint: disable=line-too-long + snapshot: Optional[str] = None, + share_snapshot: Optional[str] = None + ) -> Tuple[str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]]]: # pylint: disable=line-too-long query_str = "?" if snapshot: - query_str += "snapshot={}&".format(self.snapshot) + query_str += f"snapshot={snapshot}&" if share_snapshot: - query_str += "sharesnapshot={}&".format(self.snapshot) + query_str += f"sharesnapshot={share_snapshot}&" if sas_token and isinstance(credential, AzureSasCredential): raise ValueError( "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") - if sas_token and not credential: - query_str += sas_token - elif is_credential_sastoken(credential): + if _is_credential_sastoken(credential): + credential = cast(str, credential) query_str += credential.lstrip("?") credential = None + elif sas_token: + query_str += sas_token return query_str.rstrip("?&"), credential - def _create_pipeline(self, credential, **kwargs): - # type: (Any, **Any) -> Tuple[Configuration, Pipeline] - self._credential_policy = None + def _create_pipeline( + self, credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Tuple[StorageConfiguration, Pipeline]: + self._credential_policy: Any = None if hasattr(credential, "get_token"): - self._credential_policy = BearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE) + if kwargs.get('audience'): + audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE + else: + audience = STORAGE_OAUTH_SCOPE + self._credential_policy = StorageBearerTokenCredentialPolicy(cast(TokenCredential, credential), audience) elif isinstance(credential, SharedKeyCredentialPolicy): self._credential_policy = credential elif isinstance(credential, AzureSasCredential): self._credential_policy = AzureSasCredentialPolicy(credential) elif credential is not None: - raise TypeError("Unsupported credential: {}".format(type(credential))) + raise TypeError(f"Unsupported credential: {type(credential)}") config = kwargs.get("_configuration") or create_configuration(**kwargs) if kwargs.get("_pipeline"): return config, kwargs["_pipeline"] - config.transport = kwargs.get("transport") # type: ignore + transport = kwargs.get("transport") kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) kwargs.setdefault("read_timeout", READ_TIMEOUT) - if 
not config.transport: - config.transport = RequestsTransport(**kwargs) + if not transport: + transport = RequestsTransport(**kwargs) policies = [ QueueMessagePolicy(), config.proxy_policy, @@ -258,26 +267,34 @@ def _create_pipeline(self, credential, **kwargs): HttpLoggingPolicy(**kwargs) ] if kwargs.get("_additional_pipeline_policies"): - policies = policies + kwargs.get("_additional_pipeline_policies") - return config, Pipeline(config.transport, policies=policies) + policies = policies + kwargs.get("_additional_pipeline_policies") # type: ignore + config.transport = transport # type: ignore + return config, Pipeline(transport, policies=policies) def _batch_send( - self, *reqs, # type: HttpRequest - **kwargs - ): + self, + *reqs: "HttpRequest", + **kwargs: Any + ) -> Iterator["HttpResponse"]: """Given a series of requests, do a Storage batch call. + + :param HttpRequest reqs: A collection of HttpRequest objects. + :returns: An iterator of HttpResponse objects. + :rtype: Iterator[HttpResponse] """ # Pop it here, so requests doesn't feel bad about additional kwarg raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) + batch_id = str(uuid.uuid1()) + request = self._client._client.post( # pylint: disable=protected-access - url='{}://{}/?comp=batch{}{}'.format( - self.scheme, - self.primary_hostname, - kwargs.pop('sas', ""), - kwargs.pop('timeout', "") + url=( + f'{self.scheme}://{self.primary_hostname}/' + f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}" + f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}" ), headers={ - 'x-ms-version': self.api_version + 'x-ms-version': self.api_version, + "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False) } ) @@ -291,10 +308,17 @@ def _batch_send( enforce_https=False ) + Pipeline._prepare_multipart_mixed_request(request) # pylint: disable=protected-access + body = serialize_batch_body(request.multipart_mixed_info[0], batch_id) + request.set_bytes_body(body) + + temp = request.multipart_mixed_info + request.multipart_mixed_info = None pipeline_response = self._pipeline.run( request, **kwargs ) response = pipeline_response.http_response + request.multipart_mixed_info = temp try: if response.status_code not in [202]: @@ -309,10 +333,11 @@ def _batch_send( ) raise error return iter(parts) - return parts + return parts # type: ignore [no-any-return] except HttpResponseError as error: process_storage_error(error) + class TransportWrapper(HttpTransport): """Wrapper class that ensures that an inner client created by a `get_client` method does not close the outer transport for the parent @@ -333,12 +358,15 @@ def close(self): def __enter__(self): pass - def __exit__(self, *args): # pylint: disable=arguments-differ + def __exit__(self, *args): pass -def _format_shared_key_credential(account_name, credential): - if isinstance(credential, six.string_types): +def _format_shared_key_credential( + account_name: Optional[str], + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]] = None # pylint: disable=line-too-long +) -> Any: + if isinstance(credential, str): if not account_name: raise ValueError("Unable to determine account name for shared key credential.") credential = {"account_name": account_name, "account_key": credential} @@ -348,23 +376,29 @@ if "account_key" not in credential: raise ValueError("Shared key credential missing 'account_key'")
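# A sketch of the normalization this helper performs (annotation only, not part
# of the vendored module):
#     key string               -> wrapped above into {"account_name": ..., "account_key": ...}
#     dict                     -> SharedKeyCredentialPolicy(**credential), just below
#     AzureNamedKeyCredential  -> SharedKeyCredentialPolicy(named_key.name, named_key.key)
#     anything else (SAS string, TokenCredential, None) -> returned unchanged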
return SharedKeyCredentialPolicy(**credential) + if isinstance(credential, AzureNamedKeyCredential): + return SharedKeyCredentialPolicy(credential.named_key.name, credential.named_key.key) return credential -def parse_connection_str(conn_str, credential, service): +def parse_connection_str( + conn_str: str, + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]], + service: str +) -> Tuple[str, Optional[str], Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]]]: # pylint: disable=line-too-long conn_str = conn_str.rstrip(";") - conn_settings = [s.split("=", 1) for s in conn_str.split(";")] - if any(len(tup) != 2 for tup in conn_settings): + conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] + if any(len(tup) != 2 for tup in conn_settings_list): raise ValueError("Connection string is either blank or malformed.") - conn_settings = dict(conn_settings) + conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) endpoints = _SERVICE_PARAMS[service] primary = None secondary = None if not credential: try: - credential = {"account_name": conn_settings["AccountName"], "account_key": conn_settings["AccountKey"]} + credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} except KeyError: - credential = conn_settings.get("SharedAccessSignature") + credential = conn_settings.get("SHAREDACCESSSIGNATURE") if endpoints["primary"] in conn_settings: primary = conn_settings[endpoints["primary"]] if endpoints["secondary"] in conn_settings: @@ -373,77 +407,52 @@ def parse_connection_str(conn_str, credential, service): if endpoints["secondary"] in conn_settings: raise ValueError("Connection string specifies only secondary endpoint.") try: - primary = "{}://{}.{}.{}".format( - conn_settings["DefaultEndpointsProtocol"], - conn_settings["AccountName"], - service, - conn_settings["EndpointSuffix"], + primary = ( + f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://" + f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}" ) - secondary = "{}-secondary.{}.{}".format( - conn_settings["AccountName"], service, conn_settings["EndpointSuffix"] + secondary = ( + f"{conn_settings['ACCOUNTNAME']}-secondary." + f"{service}.{conn_settings['ENDPOINTSUFFIX']}" ) except KeyError: pass if not primary: try: - primary = "https://{}.{}.{}".format( - conn_settings["AccountName"], service, conn_settings.get("EndpointSuffix", SERVICE_HOST_BASE) + primary = ( + f"https://{conn_settings['ACCOUNTNAME']}."
+ f"{service}.{conn_settings.get('ENDPOINTSUFFIX', SERVICE_HOST_BASE)}" ) - except KeyError: - raise ValueError("Connection string missing required connection details.") + except KeyError as exc: + raise ValueError("Connection string missing required connection details.") from exc + if service == "dfs": + primary = primary.replace(".blob.", ".dfs.") + if secondary: + secondary = secondary.replace(".blob.", ".dfs.") return primary, secondary, credential -def create_configuration(**kwargs): - # type: (**Any) -> Configuration - config = Configuration(**kwargs) +def create_configuration(**kwargs: Any) -> StorageConfiguration: + # Backwards compatibility if someone is not passing sdk_moniker + if not kwargs.get("sdk_moniker"): + kwargs["sdk_moniker"] = f"storage-{kwargs.pop('storage_sdk')}/{VERSION}" + config = StorageConfiguration(**kwargs) config.headers_policy = StorageHeadersPolicy(**kwargs) - config.user_agent_policy = UserAgentPolicy( - sdk_moniker="storage-{}/{}".format(kwargs.pop('storage_sdk'), VERSION), **kwargs) + config.user_agent_policy = UserAgentPolicy(**kwargs) config.retry_policy = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) config.logging_policy = StorageLoggingPolicy(**kwargs) config.proxy_policy = ProxyPolicy(**kwargs) - - # Storage settings - config.max_single_put_size = kwargs.get("max_single_put_size", 64 * 1024 * 1024) - config.copy_polling_interval = 15 - - # Block blob uploads - config.max_block_size = kwargs.get("max_block_size", 4 * 1024 * 1024) - config.min_large_block_upload_threshold = kwargs.get("min_large_block_upload_threshold", 4 * 1024 * 1024 + 1) - config.use_byte_buffer = kwargs.get("use_byte_buffer", False) - - # Page blob uploads - config.max_page_size = kwargs.get("max_page_size", 4 * 1024 * 1024) - - # Blob downloads - config.max_single_get_size = kwargs.get("max_single_get_size", 32 * 1024 * 1024) - config.max_chunk_get_size = kwargs.get("max_chunk_get_size", 4 * 1024 * 1024) - - # File uploads - config.max_range_size = kwargs.get("max_range_size", 4 * 1024 * 1024) return config -def parse_query(query_str): +def parse_query(query_str: str) -> Tuple[Optional[str], Optional[str]]: sas_values = QueryStringConstants.to_list() parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()} - sas_params = ["{}={}".format(k, quote(v, safe='')) for k, v in parsed_query.items() if k in sas_values] + sas_params = [f"{k}={quote(v, safe='')}" for k, v in parsed_query.items() if k in sas_values] sas_token = None if sas_params: sas_token = "&".join(sas_params) snapshot = parsed_query.get("snapshot") or parsed_query.get("sharesnapshot") return snapshot, sas_token - - -def is_credential_sastoken(credential): - if not credential or not isinstance(credential, six.string_types): - return False - - sas_values = QueryStringConstants.to_list() - parsed_query = parse_qs(credential.lstrip("?")) - if parsed_query and all([k in sas_values for k in parsed_query.keys()]): - return True - return False diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client_async.py index 8834292ad688..6186b29db107 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client_async.py +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/base_client_async.py @@ -3,47 +3,51 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# mypy: disable-error-code="attr-defined" -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, Type, Tuple, - TYPE_CHECKING -) import logging +from typing import Any, cast, Dict, Optional, Tuple, TYPE_CHECKING, Union -from azure.core.credentials import AzureSasCredential -from azure.core.pipeline import AsyncPipeline from azure.core.async_paging import AsyncList +from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential +from azure.core.credentials_async import AsyncTokenCredential from azure.core.exceptions import HttpResponseError +from azure.core.pipeline import AsyncPipeline from azure.core.pipeline.policies import ( - ContentDecodePolicy, - AsyncBearerTokenCredentialPolicy, AsyncRedirectPolicy, + AzureSasCredentialPolicy, + ContentDecodePolicy, DistributedTracingPolicy, HttpLoggingPolicy, - AzureSasCredentialPolicy, ) from azure.core.pipeline.transport import AsyncHttpTransport -from .constants import STORAGE_OAUTH_SCOPE, CONNECTION_TIMEOUT, READ_TIMEOUT from .authentication import SharedKeyCredentialPolicy from .base_client import create_configuration +from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE +from .models import StorageConfiguration from .policies import ( + QueueMessagePolicy, StorageContentValidation, - StorageRequestHook, - StorageHosts, StorageHeadersPolicy, - QueueMessagePolicy + StorageHosts, + StorageRequestHook, ) -from .policies_async import AsyncStorageResponseHook - -from .response_handlers import process_storage_error, PartialBatchErrorException +from .policies_async import AsyncStorageBearerTokenCredentialPolicy, AsyncStorageResponseHook +from .response_handlers import PartialBatchErrorException, process_storage_error +from .._shared_access_signature import _is_credential_sastoken if TYPE_CHECKING: - from azure.core.pipeline import Pipeline - from azure.core.pipeline.transport import HttpRequest - from azure.core.configuration import Configuration + from azure.core.pipeline.transport import HttpRequest, HttpResponse # pylint: disable=C4756 _LOGGER = logging.getLogger(__name__) +_SERVICE_PARAMS = { + "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"}, + "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"}, + "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, + "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, +} + class AsyncStorageAccountHostsMixin(object): @@ -66,64 +70,101 @@ async def close(self): """ await self._client.close() - def _create_pipeline(self, credential, **kwargs): - # type: (Any, **Any) -> Tuple[Configuration, Pipeline] - self._credential_policy = None + def _format_query_string( + self, sas_token: Optional[str], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]], # pylint: disable=line-too-long + snapshot: Optional[str] = None, + share_snapshot: Optional[str] = None + ) -> Tuple[str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]]]: # pylint: 
disable=line-too-long + query_str = "?" + if snapshot: + query_str += f"snapshot={snapshot}&" + if share_snapshot: + query_str += f"sharesnapshot={share_snapshot}&" + if sas_token and isinstance(credential, AzureSasCredential): + raise ValueError( + "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") + if _is_credential_sastoken(credential): + query_str += credential.lstrip("?") # type: ignore [union-attr] + credential = None + elif sas_token: + query_str += sas_token + return query_str.rstrip("?&"), credential + + def _create_pipeline( + self, credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Tuple[StorageConfiguration, AsyncPipeline]: + self._credential_policy: Optional[ + Union[AsyncStorageBearerTokenCredentialPolicy, + SharedKeyCredentialPolicy, + AzureSasCredentialPolicy]] = None if hasattr(credential, 'get_token'): - self._credential_policy = AsyncBearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE) + if kwargs.get('audience'): + audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE + else: + audience = STORAGE_OAUTH_SCOPE + self._credential_policy = AsyncStorageBearerTokenCredentialPolicy( + cast(AsyncTokenCredential, credential), audience) elif isinstance(credential, SharedKeyCredentialPolicy): self._credential_policy = credential elif isinstance(credential, AzureSasCredential): self._credential_policy = AzureSasCredentialPolicy(credential) elif credential is not None: - raise TypeError("Unsupported credential: {}".format(type(credential))) + raise TypeError(f"Unsupported credential: {type(credential)}") config = kwargs.get('_configuration') or create_configuration(**kwargs) if kwargs.get('_pipeline'): return config, kwargs['_pipeline'] - config.transport = kwargs.get('transport') # type: ignore + transport = kwargs.get('transport') kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) kwargs.setdefault("read_timeout", READ_TIMEOUT) - if not config.transport: + if not transport: try: - from azure.core.pipeline.transport import AioHttpTransport - except ImportError: - raise ImportError("Unable to create async transport. Please check aiohttp is installed.") - config.transport = AioHttpTransport(**kwargs) + from azure.core.pipeline.transport import AioHttpTransport # pylint: disable=non-abstract-transport-import + except ImportError as exc: + raise ImportError("Unable to create async transport. 
Please check aiohttp is installed.") from exc + transport = AioHttpTransport(**kwargs) + hosts = self._hosts policies = [ QueueMessagePolicy(), - config.headers_policy, config.proxy_policy, config.user_agent_policy, StorageContentValidation(), - StorageRequestHook(**kwargs), - self._credential_policy, ContentDecodePolicy(response_encoding="utf-8"), AsyncRedirectPolicy(**kwargs), - StorageHosts(hosts=self._hosts, **kwargs), # type: ignore + StorageHosts(hosts=hosts, **kwargs), config.retry_policy, + config.headers_policy, + StorageRequestHook(**kwargs), + self._credential_policy, config.logging_policy, AsyncStorageResponseHook(**kwargs), DistributedTracingPolicy(**kwargs), HttpLoggingPolicy(**kwargs), ] if kwargs.get("_additional_pipeline_policies"): - policies = policies + kwargs.get("_additional_pipeline_policies") - return config, AsyncPipeline(config.transport, policies=policies) + policies = policies + kwargs.get("_additional_pipeline_policies") # type: ignore + config.transport = transport # type: ignore + return config, AsyncPipeline(transport, policies=policies) # type: ignore async def _batch_send( - self, *reqs: 'HttpRequest', - **kwargs - ): + self, + *reqs: "HttpRequest", + **kwargs: Any + ) -> AsyncList["HttpResponse"]: """Given a series of requests, do a Storage batch call. + + :param HttpRequest reqs: A collection of HttpRequest objects. + :returns: An AsyncList of HttpResponse objects. + :rtype: AsyncList[HttpResponse] """ # Pop it here, so requests doesn't feel bad about additional kwarg raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) request = self._client._client.post( # pylint: disable=protected-access - url='{}://{}/?comp=batch{}{}'.format( - self.scheme, - self.primary_hostname, - kwargs.pop('sas', None), - kwargs.pop('timeout', None) + url=( + f'{self.scheme}://{self.primary_hostname}/' + f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}" + f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}" ), headers={ 'x-ms-version': self.api_version @@ -132,7 +173,7 @@ async def _batch_send( policies = [StorageHeadersPolicy()] if self._credential_policy: - policies.append(self._credential_policy) + policies.append(self._credential_policy) # type: ignore request.set_multipart_mixed( *reqs, @@ -160,10 +201,60 @@ async def _batch_send( ) raise error return AsyncList(parts_list) - return parts + return parts # type: ignore [no-any-return] except HttpResponseError as error: process_storage_error(error) +def parse_connection_str( + conn_str: str, + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]], + service: str +) -> Tuple[str, Optional[str], Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]]]: # pylint: disable=line-too-long + conn_str = conn_str.rstrip(";") + conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] + if any(len(tup) != 2 for tup in conn_settings_list): + raise ValueError("Connection string is either blank or malformed.") + conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) + endpoints = _SERVICE_PARAMS[service] + primary = None + secondary = None + if not credential: + try: + credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} + except KeyError: + credential = conn_settings.get("SHAREDACCESSSIGNATURE") + if endpoints["primary"] in conn_settings: + primary = conn_settings[endpoints["primary"]] + if endpoints["secondary"] in
conn_settings: + secondary = conn_settings[endpoints["secondary"]] + else: + if endpoints["secondary"] in conn_settings: + raise ValueError("Connection string specifies only secondary endpoint.") + try: + primary = ( + f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://" + f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}" ) + secondary = ( + f"{conn_settings['ACCOUNTNAME']}-secondary." + f"{service}.{conn_settings['ENDPOINTSUFFIX']}" ) + except KeyError: + pass + + if not primary: + try: + primary = ( + f"https://{conn_settings['ACCOUNTNAME']}." + f"{service}.{conn_settings.get('ENDPOINTSUFFIX', SERVICE_HOST_BASE)}" ) + except KeyError as exc: + raise ValueError("Connection string missing required connection details.") from exc + if service == "dfs": + primary = primary.replace(".blob.", ".dfs.") + if secondary: + secondary = secondary.replace(".blob.", ".dfs.") + return primary, secondary, credential class AsyncTransportWrapper(AsyncHttpTransport): """Wrapper class that ensures that an inner client created @@ -185,5 +276,5 @@ async def close(self): async def __aenter__(self): pass - async def __aexit__(self, *args): # pylint: disable=arguments-differ + async def __aexit__(self, *args): pass diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/constants.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/constants.py index bdee829383cd..0b4b029a2d1b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/constants.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/constants.py @@ -4,24 +4,16 @@ # license information. # -------------------------------------------------------------------------- -import sys -from .._generated import AzureBlobStorage +from .._serialize import _SUPPORTED_API_VERSIONS -X_MS_VERSION = AzureBlobStorage(url="get_api_version")._config.version # pylint: disable=protected-access +X_MS_VERSION = _SUPPORTED_API_VERSIONS[-1] -# Socket timeout in seconds +# Default socket timeouts, in seconds CONNECTION_TIMEOUT = 20 -READ_TIMEOUT = 20 - -# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned) -# The socket timeout is now the maximum total duration to send all data.
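# Worked arithmetic for the retired value removed just below (annotation only;
# the figures are the ones named in the removed comment):
#     4000 MB / 50 KB/s = (4000 * 1024 KB) / (50 KB/s) = 81920 s,
# which was kept as a round 80000 s before being replaced by READ_TIMEOUT = 60.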
-if sys.version_info >= (3, 5): - # the timeout to connect is 20 seconds, and the read timeout is 80000 seconds - # the 80000 seconds was calculated with: - # 4000MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed) - READ_TIMEOUT = 80000 +READ_TIMEOUT = 60 +DEFAULT_OAUTH_SCOPE = "/.default" STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default" SERVICE_HOST_BASE = 'core.windows.net' diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/models.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/models.py index c51356bd885f..d78cd9113133 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/models.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/models.py @@ -5,6 +5,11 @@ # -------------------------------------------------------------------------- # pylint: disable=too-many-instance-attributes from enum import Enum +from typing import Optional + +from azure.core import CaseInsensitiveEnumMeta +from azure.core.configuration import Configuration +from azure.core.pipeline.policies import UserAgentPolicy def get_enum_value(value): @@ -16,170 +21,177 @@ def get_enum_value(value): return value -class StorageErrorCode(str, Enum): +class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): # Generic storage values - account_already_exists = "AccountAlreadyExists" - account_being_created = "AccountBeingCreated" - account_is_disabled = "AccountIsDisabled" - authentication_failed = "AuthenticationFailed" - authorization_failure = "AuthorizationFailure" - no_authentication_information = "NoAuthenticationInformation" - condition_headers_not_supported = "ConditionHeadersNotSupported" - condition_not_met = "ConditionNotMet" - empty_metadata_key = "EmptyMetadataKey" - insufficient_account_permissions = "InsufficientAccountPermissions" - internal_error = "InternalError" - invalid_authentication_info = "InvalidAuthenticationInfo" - invalid_header_value = "InvalidHeaderValue" - invalid_http_verb = "InvalidHttpVerb" - invalid_input = "InvalidInput" - invalid_md5 = "InvalidMd5" - invalid_metadata = "InvalidMetadata" - invalid_query_parameter_value = "InvalidQueryParameterValue" - invalid_range = "InvalidRange" - invalid_resource_name = "InvalidResourceName" - invalid_uri = "InvalidUri" - invalid_xml_document = "InvalidXmlDocument" - invalid_xml_node_value = "InvalidXmlNodeValue" - md5_mismatch = "Md5Mismatch" - metadata_too_large = "MetadataTooLarge" - missing_content_length_header = "MissingContentLengthHeader" - missing_required_query_parameter = "MissingRequiredQueryParameter" - missing_required_header = "MissingRequiredHeader" - missing_required_xml_node = "MissingRequiredXmlNode" - multiple_condition_headers_not_supported = "MultipleConditionHeadersNotSupported" - operation_timed_out = "OperationTimedOut" - out_of_range_input = "OutOfRangeInput" - out_of_range_query_parameter_value = "OutOfRangeQueryParameterValue" - request_body_too_large = "RequestBodyTooLarge" - resource_type_mismatch = "ResourceTypeMismatch" - request_url_failed_to_parse = "RequestUrlFailedToParse" - resource_already_exists = "ResourceAlreadyExists" - resource_not_found = "ResourceNotFound" - server_busy = "ServerBusy" - unsupported_header = 
"UnsupportedHeader" - unsupported_xml_node = "UnsupportedXmlNode" - unsupported_query_parameter = "UnsupportedQueryParameter" - unsupported_http_verb = "UnsupportedHttpVerb" + ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" + ACCOUNT_BEING_CREATED = "AccountBeingCreated" + ACCOUNT_IS_DISABLED = "AccountIsDisabled" + AUTHENTICATION_FAILED = "AuthenticationFailed" + AUTHORIZATION_FAILURE = "AuthorizationFailure" + NO_AUTHENTICATION_INFORMATION = "NoAuthenticationInformation" + CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported" + CONDITION_NOT_MET = "ConditionNotMet" + EMPTY_METADATA_KEY = "EmptyMetadataKey" + INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions" + INTERNAL_ERROR = "InternalError" + INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo" + INVALID_HEADER_VALUE = "InvalidHeaderValue" + INVALID_HTTP_VERB = "InvalidHttpVerb" + INVALID_INPUT = "InvalidInput" + INVALID_MD5 = "InvalidMd5" + INVALID_METADATA = "InvalidMetadata" + INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue" + INVALID_RANGE = "InvalidRange" + INVALID_RESOURCE_NAME = "InvalidResourceName" + INVALID_URI = "InvalidUri" + INVALID_XML_DOCUMENT = "InvalidXmlDocument" + INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue" + MD5_MISMATCH = "Md5Mismatch" + METADATA_TOO_LARGE = "MetadataTooLarge" + MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader" + MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter" + MISSING_REQUIRED_HEADER = "MissingRequiredHeader" + MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode" + MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported" + OPERATION_TIMED_OUT = "OperationTimedOut" + OUT_OF_RANGE_INPUT = "OutOfRangeInput" + OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue" + REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge" + RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch" + REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse" + RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists" + RESOURCE_NOT_FOUND = "ResourceNotFound" + SERVER_BUSY = "ServerBusy" + UNSUPPORTED_HEADER = "UnsupportedHeader" + UNSUPPORTED_XML_NODE = "UnsupportedXmlNode" + UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter" + UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb" # Blob values - append_position_condition_not_met = "AppendPositionConditionNotMet" - blob_already_exists = "BlobAlreadyExists" - blob_not_found = "BlobNotFound" - blob_overwritten = "BlobOverwritten" - blob_tier_inadequate_for_content_length = "BlobTierInadequateForContentLength" - block_count_exceeds_limit = "BlockCountExceedsLimit" - block_list_too_long = "BlockListTooLong" - cannot_change_to_lower_tier = "CannotChangeToLowerTier" - cannot_verify_copy_source = "CannotVerifyCopySource" - container_already_exists = "ContainerAlreadyExists" - container_being_deleted = "ContainerBeingDeleted" - container_disabled = "ContainerDisabled" - container_not_found = "ContainerNotFound" - content_length_larger_than_tier_limit = "ContentLengthLargerThanTierLimit" - copy_across_accounts_not_supported = "CopyAcrossAccountsNotSupported" - copy_id_mismatch = "CopyIdMismatch" - feature_version_mismatch = "FeatureVersionMismatch" - incremental_copy_blob_mismatch = "IncrementalCopyBlobMismatch" - incremental_copy_of_eralier_version_snapshot_not_allowed = "IncrementalCopyOfEralierVersionSnapshotNotAllowed" - incremental_copy_source_must_be_snapshot = "IncrementalCopySourceMustBeSnapshot" - infinite_lease_duration_required = "InfiniteLeaseDurationRequired" - 
invalid_blob_or_block = "InvalidBlobOrBlock" - invalid_blob_tier = "InvalidBlobTier" - invalid_blob_type = "InvalidBlobType" - invalid_block_id = "InvalidBlockId" - invalid_block_list = "InvalidBlockList" - invalid_operation = "InvalidOperation" - invalid_page_range = "InvalidPageRange" - invalid_source_blob_type = "InvalidSourceBlobType" - invalid_source_blob_url = "InvalidSourceBlobUrl" - invalid_version_for_page_blob_operation = "InvalidVersionForPageBlobOperation" - lease_already_present = "LeaseAlreadyPresent" - lease_already_broken = "LeaseAlreadyBroken" - lease_id_mismatch_with_blob_operation = "LeaseIdMismatchWithBlobOperation" - lease_id_mismatch_with_container_operation = "LeaseIdMismatchWithContainerOperation" - lease_id_mismatch_with_lease_operation = "LeaseIdMismatchWithLeaseOperation" - lease_id_missing = "LeaseIdMissing" - lease_is_breaking_and_cannot_be_acquired = "LeaseIsBreakingAndCannotBeAcquired" - lease_is_breaking_and_cannot_be_changed = "LeaseIsBreakingAndCannotBeChanged" - lease_is_broken_and_cannot_be_renewed = "LeaseIsBrokenAndCannotBeRenewed" - lease_lost = "LeaseLost" - lease_not_present_with_blob_operation = "LeaseNotPresentWithBlobOperation" - lease_not_present_with_container_operation = "LeaseNotPresentWithContainerOperation" - lease_not_present_with_lease_operation = "LeaseNotPresentWithLeaseOperation" - max_blob_size_condition_not_met = "MaxBlobSizeConditionNotMet" - no_pending_copy_operation = "NoPendingCopyOperation" - operation_not_allowed_on_incremental_copy_blob = "OperationNotAllowedOnIncrementalCopyBlob" - pending_copy_operation = "PendingCopyOperation" - previous_snapshot_cannot_be_newer = "PreviousSnapshotCannotBeNewer" - previous_snapshot_not_found = "PreviousSnapshotNotFound" - previous_snapshot_operation_not_supported = "PreviousSnapshotOperationNotSupported" - sequence_number_condition_not_met = "SequenceNumberConditionNotMet" - sequence_number_increment_too_large = "SequenceNumberIncrementTooLarge" - snapshot_count_exceeded = "SnapshotCountExceeded" - snaphot_operation_rate_exceeded = "SnaphotOperationRateExceeded" - snapshots_present = "SnapshotsPresent" - source_condition_not_met = "SourceConditionNotMet" - system_in_use = "SystemInUse" - target_condition_not_met = "TargetConditionNotMet" - unauthorized_blob_overwrite = "UnauthorizedBlobOverwrite" - blob_being_rehydrated = "BlobBeingRehydrated" - blob_archived = "BlobArchived" - blob_not_archived = "BlobNotArchived" + APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet" + BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType" + BLOB_ALREADY_EXISTS = "BlobAlreadyExists" + BLOB_NOT_FOUND = "BlobNotFound" + BLOB_OVERWRITTEN = "BlobOverwritten" + BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" + BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" + BLOCK_LIST_TOO_LONG = "BlockListTooLong" + CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" + CANNOT_VERIFY_COPY_SOURCE = "CannotVerifyCopySource" + CONTAINER_ALREADY_EXISTS = "ContainerAlreadyExists" + CONTAINER_BEING_DELETED = "ContainerBeingDeleted" + CONTAINER_DISABLED = "ContainerDisabled" + CONTAINER_NOT_FOUND = "ContainerNotFound" + CONTENT_LENGTH_LARGER_THAN_TIER_LIMIT = "ContentLengthLargerThanTierLimit" + COPY_ACROSS_ACCOUNTS_NOT_SUPPORTED = "CopyAcrossAccountsNotSupported" + COPY_ID_MISMATCH = "CopyIdMismatch" + FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" + INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + 
INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" + #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED instead. + INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" + INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" + INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" + INVALID_BLOB_TIER = "InvalidBlobTier" + INVALID_BLOB_TYPE = "InvalidBlobType" + INVALID_BLOCK_ID = "InvalidBlockId" + INVALID_BLOCK_LIST = "InvalidBlockList" + INVALID_OPERATION = "InvalidOperation" + INVALID_PAGE_RANGE = "InvalidPageRange" + INVALID_SOURCE_BLOB_TYPE = "InvalidSourceBlobType" + INVALID_SOURCE_BLOB_URL = "InvalidSourceBlobUrl" + INVALID_VERSION_FOR_PAGE_BLOB_OPERATION = "InvalidVersionForPageBlobOperation" + LEASE_ALREADY_PRESENT = "LeaseAlreadyPresent" + LEASE_ALREADY_BROKEN = "LeaseAlreadyBroken" + LEASE_ID_MISMATCH_WITH_BLOB_OPERATION = "LeaseIdMismatchWithBlobOperation" + LEASE_ID_MISMATCH_WITH_CONTAINER_OPERATION = "LeaseIdMismatchWithContainerOperation" + LEASE_ID_MISMATCH_WITH_LEASE_OPERATION = "LeaseIdMismatchWithLeaseOperation" + LEASE_ID_MISSING = "LeaseIdMissing" + LEASE_IS_BREAKING_AND_CANNOT_BE_ACQUIRED = "LeaseIsBreakingAndCannotBeAcquired" + LEASE_IS_BREAKING_AND_CANNOT_BE_CHANGED = "LeaseIsBreakingAndCannotBeChanged" + LEASE_IS_BROKEN_AND_CANNOT_BE_RENEWED = "LeaseIsBrokenAndCannotBeRenewed" + LEASE_LOST = "LeaseLost" + LEASE_NOT_PRESENT_WITH_BLOB_OPERATION = "LeaseNotPresentWithBlobOperation" + LEASE_NOT_PRESENT_WITH_CONTAINER_OPERATION = "LeaseNotPresentWithContainerOperation" + LEASE_NOT_PRESENT_WITH_LEASE_OPERATION = "LeaseNotPresentWithLeaseOperation" + MAX_BLOB_SIZE_CONDITION_NOT_MET = "MaxBlobSizeConditionNotMet" + NO_PENDING_COPY_OPERATION = "NoPendingCopyOperation" + OPERATION_NOT_ALLOWED_ON_INCREMENTAL_COPY_BLOB = "OperationNotAllowedOnIncrementalCopyBlob" + PENDING_COPY_OPERATION = "PendingCopyOperation" + PREVIOUS_SNAPSHOT_CANNOT_BE_NEWER = "PreviousSnapshotCannotBeNewer" + PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound" + PREVIOUS_SNAPSHOT_OPERATION_NOT_SUPPORTED = "PreviousSnapshotOperationNotSupported" + SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" + SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" + SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" + SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" + #: Deprecated: Please use SNAPSHOT_OPERATION_RATE_EXCEEDED instead. 
+ SNAPHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" + SNAPSHOTS_PRESENT = "SnapshotsPresent" + SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" + SYSTEM_IN_USE = "SystemInUse" + TARGET_CONDITION_NOT_MET = "TargetConditionNotMet" + UNAUTHORIZED_BLOB_OVERWRITE = "UnauthorizedBlobOverwrite" + BLOB_BEING_REHYDRATED = "BlobBeingRehydrated" + BLOB_ARCHIVED = "BlobArchived" + BLOB_NOT_ARCHIVED = "BlobNotArchived" # Queue values - invalid_marker = "InvalidMarker" - message_not_found = "MessageNotFound" - message_too_large = "MessageTooLarge" - pop_receipt_mismatch = "PopReceiptMismatch" - queue_already_exists = "QueueAlreadyExists" - queue_being_deleted = "QueueBeingDeleted" - queue_disabled = "QueueDisabled" - queue_not_empty = "QueueNotEmpty" - queue_not_found = "QueueNotFound" + INVALID_MARKER = "InvalidMarker" + MESSAGE_NOT_FOUND = "MessageNotFound" + MESSAGE_TOO_LARGE = "MessageTooLarge" + POP_RECEIPT_MISMATCH = "PopReceiptMismatch" + QUEUE_ALREADY_EXISTS = "QueueAlreadyExists" + QUEUE_BEING_DELETED = "QueueBeingDeleted" + QUEUE_DISABLED = "QueueDisabled" + QUEUE_NOT_EMPTY = "QueueNotEmpty" + QUEUE_NOT_FOUND = "QueueNotFound" # File values - cannot_delete_file_or_directory = "CannotDeleteFileOrDirectory" - client_cache_flush_delay = "ClientCacheFlushDelay" - delete_pending = "DeletePending" - directory_not_empty = "DirectoryNotEmpty" - file_lock_conflict = "FileLockConflict" - invalid_file_or_directory_path_name = "InvalidFileOrDirectoryPathName" - parent_not_found = "ParentNotFound" - read_only_attribute = "ReadOnlyAttribute" - share_already_exists = "ShareAlreadyExists" - share_being_deleted = "ShareBeingDeleted" - share_disabled = "ShareDisabled" - share_not_found = "ShareNotFound" - sharing_violation = "SharingViolation" - share_snapshot_in_progress = "ShareSnapshotInProgress" - share_snapshot_count_exceeded = "ShareSnapshotCountExceeded" - share_snapshot_operation_not_supported = "ShareSnapshotOperationNotSupported" - share_has_snapshots = "ShareHasSnapshots" - container_quota_downgrade_not_allowed = "ContainerQuotaDowngradeNotAllowed" + CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" + CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" + DELETE_PENDING = "DeletePending" + DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" + FILE_LOCK_CONFLICT = "FileLockConflict" + FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed" + INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" + PARENT_NOT_FOUND = "ParentNotFound" + READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" + SHARE_ALREADY_EXISTS = "ShareAlreadyExists" + SHARE_BEING_DELETED = "ShareBeingDeleted" + SHARE_DISABLED = "ShareDisabled" + SHARE_NOT_FOUND = "ShareNotFound" + SHARING_VIOLATION = "SharingViolation" + SHARE_SNAPSHOT_IN_PROGRESS = "ShareSnapshotInProgress" + SHARE_SNAPSHOT_COUNT_EXCEEDED = "ShareSnapshotCountExceeded" + SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" + SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" + CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" # DataLake values - content_length_must_be_zero = 'ContentLengthMustBeZero' - path_already_exists = 'PathAlreadyExists' - invalid_flush_position = 'InvalidFlushPosition' - invalid_property_name = 'InvalidPropertyName' - invalid_source_uri = 'InvalidSourceUri' - unsupported_rest_version = 'UnsupportedRestVersion' - 
file_system_not_found = 'FilesystemNotFound' - path_not_found = 'PathNotFound' - rename_destination_parent_path_not_found = 'RenameDestinationParentPathNotFound' - source_path_not_found = 'SourcePathNotFound' - destination_path_is_being_deleted = 'DestinationPathIsBeingDeleted' - file_system_already_exists = 'FilesystemAlreadyExists' - file_system_being_deleted = 'FilesystemBeingDeleted' - invalid_destination_path = 'InvalidDestinationPath' - invalid_rename_source_path = 'InvalidRenameSourcePath' - invalid_source_or_destination_resource_type = 'InvalidSourceOrDestinationResourceType' - lease_is_already_broken = 'LeaseIsAlreadyBroken' - lease_name_mismatch = 'LeaseNameMismatch' - path_conflict = 'PathConflict' - source_path_is_being_deleted = 'SourcePathIsBeingDeleted' + CONTENT_LENGTH_MUST_BE_ZERO = 'ContentLengthMustBeZero' + PATH_ALREADY_EXISTS = 'PathAlreadyExists' + INVALID_FLUSH_POSITION = 'InvalidFlushPosition' + INVALID_PROPERTY_NAME = 'InvalidPropertyName' + INVALID_SOURCE_URI = 'InvalidSourceUri' + UNSUPPORTED_REST_VERSION = 'UnsupportedRestVersion' + FILE_SYSTEM_NOT_FOUND = 'FilesystemNotFound' + PATH_NOT_FOUND = 'PathNotFound' + RENAME_DESTINATION_PARENT_PATH_NOT_FOUND = 'RenameDestinationParentPathNotFound' + SOURCE_PATH_NOT_FOUND = 'SourcePathNotFound' + DESTINATION_PATH_IS_BEING_DELETED = 'DestinationPathIsBeingDeleted' + FILE_SYSTEM_ALREADY_EXISTS = 'FilesystemAlreadyExists' + FILE_SYSTEM_BEING_DELETED = 'FilesystemBeingDeleted' + INVALID_DESTINATION_PATH = 'InvalidDestinationPath' + INVALID_RENAME_SOURCE_PATH = 'InvalidRenameSourcePath' + INVALID_SOURCE_OR_DESTINATION_RESOURCE_TYPE = 'InvalidSourceOrDestinationResourceType' + LEASE_IS_ALREADY_BROKEN = 'LeaseIsAlreadyBroken' + LEASE_NAME_MISMATCH = 'LeaseNameMismatch' + PATH_CONFLICT = 'PathConflict' + SOURCE_PATH_IS_BEING_DELETED = 'SourcePathIsBeingDeleted' class DictMixin(object): @@ -199,19 +211,22 @@ def __len__(self): def __delitem__(self, key): self.__dict__[key] = None + # Compare objects by comparing all attributes. def __eq__(self, other): - """Compare objects by comparing all attributes.""" if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False + # Compare objects by comparing all attributes. def __ne__(self, other): - """Compare objects by comparing all attributes.""" return not self.__eq__(other) def __str__(self): return str({k: v for k, v in self.__dict__.items() if not k.startswith('_')}) + def __contains__(self, key): + return key in self.__dict__ + def has_key(self, k): return k in self.__dict__ @@ -260,7 +275,17 @@ class ResourceTypes(object): files(e.g. Put Blob, Query Entity, Get Messages, Create File, etc.) """ - def __init__(self, service=False, container=False, object=False): # pylint: disable=redefined-builtin + service: bool = False + container: bool = False + object: bool = False + _str: str + + def __init__( + self, + service: bool = False, + container: bool = False, + object: bool = False # pylint: disable=redefined-builtin + ) -> None: self.service = service self.container = container self.object = object @@ -289,7 +314,7 @@ def from_string(cls, string): res_object = 'o' in string parsed = cls(res_service, res_container, res_object) - parsed._str = string # pylint: disable = protected-access + parsed._str = string return parsed @@ -328,14 +353,46 @@ class AccountSasPermissions(object): To enable set or get tags on the blobs in the container. :keyword bool filter_by_tags: To enable get blobs by tags, this should be used together with list permission. 
+ :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. + :keyword bool permanent_delete: + To enable permanent delete on the blob. + Valid for Object resource type of Blob only. """ - def __init__(self, read=False, write=False, delete=False, - list=False, # pylint: disable=redefined-builtin - add=False, create=False, update=False, process=False, delete_previous_version=False, **kwargs): + + read: bool = False + write: bool = False + delete: bool = False + delete_previous_version: bool = False + list: bool = False + add: bool = False + create: bool = False + update: bool = False + process: bool = False + tag: bool = False + filter_by_tags: bool = False + set_immutability_policy: bool = False + permanent_delete: bool = False + + def __init__( + self, + read: bool = False, + write: bool = False, + delete: bool = False, + list: bool = False, # pylint: disable=redefined-builtin + add: bool = False, + create: bool = False, + update: bool = False, + process: bool = False, + delete_previous_version: bool = False, + **kwargs + ) -> None: self.read = read self.write = write self.delete = delete self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) self.list = list self.add = add self.create = create @@ -343,17 +400,20 @@ def __init__(self, read=False, write=False, delete=False, self.process = process self.tag = kwargs.pop('tag', False) self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + ('l' if self.list else '') + ('a' if self.add else '') + ('c' if self.create else '') + ('u' if self.update else '') + ('p' if self.process else '') + ('f' if self.filter_by_tags else '') + - ('t' if self.tag else '') + ('t' if self.tag else '') + + ('i' if self.set_immutability_policy else '') ) def __str__(self): @@ -376,6 +436,7 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission p_list = 'l' in permission p_add = 'a' in permission p_create = 'c' in permission @@ -383,24 +444,32 @@ def from_string(cls, permission): p_process = 'p' in permission p_tag = 't' in permission p_filter_by_tags = 'f' in permission + p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, - filter_by_tags=p_filter_by_tags) + filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, + permanent_delete=p_permanent_delete) return parsed + class Services(object): """Specifies the services accessible with the account SAS. - :param bool blob: - Access for the `~azure.storage.blob.BlobServiceClient` - :param bool queue: - Access for the `~azure.storage.queue.QueueServiceClient` - :param bool fileshare: - Access for the `~azure.storage.fileshare.ShareServiceClient` + :keyword bool blob: + Access for the `~azure.storage.blob.BlobServiceClient`. Default is False. + :keyword bool queue: + Access for the `~azure.storage.queue.QueueServiceClient`.
Default is False. + :keyword bool fileshare: + Access for the `~azure.storage.fileshare.ShareServiceClient`. Default is False. """ - def __init__(self, blob=False, queue=False, fileshare=False): + def __init__( + self, *, + blob: bool = False, + queue: bool = False, + fileshare: bool = False + ) -> None: self.blob = blob self.queue = queue self.fileshare = fileshare @@ -428,8 +497,8 @@ def from_string(cls, string): res_queue = 'q' in string res_file = 'f' in string - parsed = cls(res_blob, res_queue, res_file) - parsed._str = string # pylint: disable = protected-access + parsed = cls(blob=res_blob, queue=res_queue, fileshare=res_file) + parsed._str = string return parsed @@ -440,22 +509,23 @@ class UserDelegationKey(object): """ Represents a user delegation key, provided to the user by Azure Storage based on their Azure Active Directory access token. The fields are saved as simple strings since the user does not have to interact with this object; to generate an identity SAS, the user can simply pass it to the right API. - - :ivar str signed_oid: - Object ID of this token. - :ivar str signed_tid: - Tenant ID of the tenant that issued this token. - :ivar str signed_start: - The datetime this token becomes valid. - :ivar str signed_expiry: - The datetime this token expires. - :ivar str signed_service: - What service this key is valid for. - :ivar str signed_version: - The version identifier of the REST service that created this token. - :ivar str value: - The user delegation key. """ + + signed_oid: Optional[str] = None + """Object ID of this token.""" + signed_tid: Optional[str] = None + """Tenant ID of the tenant that issued this token.""" + signed_start: Optional[str] = None + """The datetime this token becomes valid.""" + signed_expiry: Optional[str] = None + """The datetime this token expires.""" + signed_service: Optional[str] = None + """What service this key is valid for.""" + signed_version: Optional[str] = None + """The version identifier of the REST service that created this token.""" + value: Optional[str] = None + """The user delegation key.""" + def __init__(self): self.signed_oid = None self.signed_tid = None @@ -464,3 +534,52 @@ def __init__(self): self.signed_service = None self.signed_version = None self.value = None + + +class StorageConfiguration(Configuration): + """ + Specifies the configurable values used in Azure Storage. + + :param int max_single_put_size: If the blob size is less than or equal to max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :param int copy_polling_interval: The interval in seconds for polling copy operations. + :param int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :param int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. + :param bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :param int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :param int min_large_chunk_upload_threshold: The max size for a single put operation. + :param int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :param int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB.
+ :param int max_range_size: The max range size for file upload. + + """ + + max_single_put_size: int + copy_polling_interval: int + max_block_size: int + min_large_block_upload_threshold: int + use_byte_buffer: bool + max_page_size: int + min_large_chunk_upload_threshold: int + max_single_get_size: int + max_chunk_get_size: int + max_range_size: int + user_agent_policy: UserAgentPolicy + + def __init__(self, **kwargs): + super(StorageConfiguration, self).__init__(**kwargs) + self.max_single_put_size = kwargs.pop('max_single_put_size', 64 * 1024 * 1024) + self.copy_polling_interval = 15 + self.max_block_size = kwargs.pop('max_block_size', 4 * 1024 * 1024) + self.min_large_block_upload_threshold = kwargs.get('min_large_block_upload_threshold', 4 * 1024 * 1024 + 1) + self.use_byte_buffer = kwargs.pop('use_byte_buffer', False) + self.max_page_size = kwargs.pop('max_page_size', 4 * 1024 * 1024) + self.min_large_chunk_upload_threshold = kwargs.pop('min_large_chunk_upload_threshold', 100 * 1024 * 1024 + 1) + self.max_single_get_size = kwargs.pop('max_single_get_size', 32 * 1024 * 1024) + self.max_chunk_get_size = kwargs.pop('max_chunk_get_size', 4 * 1024 * 1024) + self.max_range_size = kwargs.pop('max_range_size', 4 * 1024 * 1024) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/parser.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/parser.py index 0b45774face0..112c1984f4fb 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/parser.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/parser.py @@ -4,8 +4,50 @@ # license information. # -------------------------------------------------------------------------- -import sys +from datetime import datetime, timezone +from typing import Optional +EPOCH_AS_FILETIME = 116444736000000000 # January 1, 1970 as MS filetime +HUNDREDS_OF_NANOSECONDS = 10000000 -def _to_utc_datetime(value): + +def _to_utc_datetime(value: datetime) -> str: return value.strftime('%Y-%m-%dT%H:%M:%SZ') + + +def _rfc_1123_to_datetime(rfc_1123: str) -> Optional[datetime]: + """Converts an RFC 1123 date string to a UTC datetime. + + :param str rfc_1123: The time and date in RFC 1123 format. + :returns: The time and date in UTC datetime format. + :rtype: datetime + """ + if not rfc_1123: + return None + + return datetime.strptime(rfc_1123, "%a, %d %b %Y %H:%M:%S %Z") + + +def _filetime_to_datetime(filetime: str) -> Optional[datetime]: + """Converts an MS filetime string to a UTC datetime. "0" indicates None. + If parsing MS Filetime fails, tries RFC 1123 as backup. + + :param str filetime: The time and date in MS filetime format. + :returns: The time and date in UTC datetime format. 
+ :rtype: datetime + """ + if not filetime: + return None + + # Try to convert to MS Filetime + try: + temp_filetime = int(filetime) + if temp_filetime == 0: + return None + + return datetime.fromtimestamp((temp_filetime - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, tz=timezone.utc) + except ValueError: + pass + + # Try RFC 1123 as backup + return _rfc_1123_to_datetime(filetime) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies.py index 695bc35ded70..ee75cd5a466c 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies.py @@ -6,48 +6,55 @@ import base64 import hashlib -import re -import random -from time import time -from io import SEEK_SET, UnsupportedOperation import logging +import random +import re import uuid -import types -from typing import Any, TYPE_CHECKING -from wsgiref.handlers import format_date_time +from io import SEEK_SET, UnsupportedOperation +from time import time +from typing import Any, Dict, Optional, TYPE_CHECKING from urllib.parse import ( - urlparse, parse_qsl, - urlunparse, urlencode, + urlparse, + urlunparse, ) +from wsgiref.handlers import format_date_time +from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError from azure.core.pipeline.policies import ( + BearerTokenCredentialPolicy, HeadersPolicy, - SansIOHTTPPolicy, - NetworkTraceLoggingPolicy, HTTPPolicy, - RequestHistory + NetworkTraceLoggingPolicy, + RequestHistory, + SansIOHTTPPolicy ) -from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError +from .authentication import AzureSigningError, StorageHttpChallenge +from .constants import DEFAULT_OAUTH_SCOPE from .models import LocationMode - if TYPE_CHECKING: - from azure.core.pipeline import PipelineRequest, PipelineResponse + from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import + PipelineRequest, + PipelineResponse + ) _LOGGER = logging.getLogger(__name__) def encode_base64(data): + if isinstance(data, str): + data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') +# Are we out of retries? def is_exhausted(settings): - """Are we out of retries?""" retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status']) retry_counts = list(filter(None, retry_counts)) if not retry_counts: @@ -60,16 +67,15 @@ def retry_hook(settings, **kwargs): settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs) +# Is this method/status code retryable? (Based on allowlists and control +# variables such as the number of total retries to allow, whether to +# respect the Retry-After header, whether this header is present, and +# whether the returned status code is on the list of status codes to +# be retried upon on the presence of the aforementioned header) def is_retry(response, mode): - """Is this method/status code retryable? 
(Based on allowlists and control - variables such as the number of total retries to allow, whether to - respect the Retry-After header, whether this header is present, and - whether the returned status code is on the list of status codes to - be retried upon on the presence of the aforementioned header) - """ status = response.http_response.status_code if 300 <= status < 500: - # An exception occured, but in most cases it was expected. Examples could + # An exception occurred, but in most cases it was expected. Examples could # include a 309 Conflict or 412 Precondition Failed. if status == 404 and mode == LocationMode.SECONDARY: # Response code 404 should be retried if secondary was used. @@ -87,6 +93,16 @@ def is_retry(response, mode): return False +def is_checksum_retry(response): + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + def urljoin(base_url, stub_url): parsed = urlparse(base_url) parsed = parsed._replace(path=parsed.path + '/' + stub_url) @@ -106,8 +122,7 @@ def on_request(self, request): class StorageHeadersPolicy(HeadersPolicy): request_id_header_name = 'x-ms-client-request-id' - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def on_request(self, request: "PipelineRequest") -> None: super(StorageHeadersPolicy, self).on_request(request) current_time = format_date_time(time()) request.http_request.headers['x-ms-date'] = current_time @@ -137,8 +152,7 @@ def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument self.hosts = hosts super(StorageHosts, self).__init__() - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def on_request(self, request: "PipelineRequest") -> None: request.context.options['hosts'] = self.hosts parsed_url = urlparse(request.http_request.url) @@ -154,7 +168,7 @@ def on_request(self, request): # Lock retries to the specific location request.context.options['retry_to_secondary'] = False if use_location not in self.hosts: - raise ValueError("Attempting to use undefined host location {}".format(use_location)) + raise ValueError(f"Attempting to use undefined host location {use_location}") if use_location != location_mode: # Update request URL to use the specified location updated = parsed_url._replace(netloc=self.hosts[use_location]) @@ -170,10 +184,14 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy): This accepts both global configuration, and per-request level with "enable_http_logger" """ - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def __init__(self, logging_enable: bool = False, **kwargs) -> None: + self.logging_body = kwargs.pop("logging_body", False) + super(StorageLoggingPolicy, self).__init__(logging_enable=logging_enable, **kwargs) + + def on_request(self, request: "PipelineRequest") -> None: http_request = request.http_request options = request.context.options + self.logging_body = self.logging_body or options.pop("logging_body", False) if options.pop("logging_enable", self.enable_http_logger): request.context["logging_enable"] = True if not _LOGGER.isEnabledFor(logging.DEBUG): @@ -202,16 +220,15 @@ def on_request(self, request): _LOGGER.debug(" %r: %r", header, value) _LOGGER.debug("Request 
body:") - # We don't want to log the binary data of a file upload. - if isinstance(http_request.body, types.GeneratorType): - _LOGGER.debug("File upload") - else: + if self.logging_body: _LOGGER.debug(str(http_request.body)) + else: + # We don't want to log the binary data of a file upload. + _LOGGER.debug("Hidden body, please use logging_body to show body") except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log request: %r", err) - def on_response(self, request, response): - # type: (PipelineRequest, PipelineResponse, Any) -> None + def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None: if response.context.pop("logging_enable", self.enable_http_logger): if not _LOGGER.isEnabledFor(logging.DEBUG): return @@ -226,31 +243,35 @@ def on_response(self, request, response): _LOGGER.debug("Response content:") pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) header = response.http_response.headers.get('content-disposition') + resp_content_type = response.http_response.headers.get("content-type", "") if header and pattern.match(header): filename = header.partition('=')[2] _LOGGER.debug("File attachments: %s", filename) - elif response.http_response.headers.get("content-type", "").endswith("octet-stream"): + elif resp_content_type.endswith("octet-stream"): _LOGGER.debug("Body contains binary data.") - elif response.http_response.headers.get("content-type", "").startswith("image"): + elif resp_content_type.startswith("image"): _LOGGER.debug("Body contains image data.") - else: - if response.context.options.get('stream', False): + + if self.logging_body and resp_content_type.startswith("text"): + _LOGGER.debug(response.http_response.text()) + elif self.logging_body: + try: + _LOGGER.debug(response.http_response.body()) + except ValueError: _LOGGER.debug("Body is streamable") - else: - _LOGGER.debug(response.http_response.text()) + except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log response: %s", repr(err)) class StorageRequestHook(SansIOHTTPPolicy): - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs): self._request_callback = kwargs.get('raw_request_hook') super(StorageRequestHook, self).__init__() - def on_request(self, request): - # type: (PipelineRequest, **Any) -> PipelineResponse + def on_request(self, request: "PipelineRequest") -> None: request_callback = request.context.options.pop('raw_request_hook', self._request_callback) if request_callback: request_callback(request) @@ -258,24 +279,33 @@ def on_request(self, request): class StorageResponseHook(HTTPPolicy): - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs): self._response_callback = kwargs.get('raw_response_hook') super(StorageResponseHook, self).__init__() - def send(self, request): - # type: (PipelineRequest) -> PipelineResponse - data_stream_total = request.context.get('data_stream_total') or \ - request.context.options.pop('data_stream_total', None) - download_stream_current = request.context.get('download_stream_current') or \ - request.context.options.pop('download_stream_current', None) - upload_stream_current = request.context.get('upload_stream_current') or \ - request.context.options.pop('upload_stream_current', None) + def send(self, request: "PipelineRequest") -> "PipelineResponse": + # Values could be 0 + data_stream_total = request.context.get('data_stream_total') + if data_stream_total is None: + data_stream_total = 
request.context.options.pop('data_stream_total', None) + download_stream_current = request.context.get('download_stream_current') + if download_stream_current is None: + download_stream_current = request.context.options.pop('download_stream_current', None) + upload_stream_current = request.context.get('upload_stream_current') + if upload_stream_current is None: + upload_stream_current = request.context.options.pop('upload_stream_current', None) + response_callback = request.context.get('response_callback') or \ request.context.options.pop('raw_response_hook', self._response_callback) response = self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) - if not will_retry and download_stream_current is not None: + + will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response) + # Auth error could come from Bearer challenge, in which case this request will be made again + is_auth_error = response.http_response.status_code == 401 + should_update_counts = not (will_retry or is_auth_error) + + if should_update_counts and download_stream_current is not None: download_stream_current += int(response.http_response.headers.get('Content-Length', 0)) if data_stream_total is None: content_range = response.http_response.headers.get('Content-Range') @@ -283,12 +313,13 @@ def send(self, request): data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1]) else: data_stream_total = download_stream_current - elif not will_retry and upload_stream_current is not None: + elif should_update_counts and upload_stream_current is not None: upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) for pipeline_obj in [request, response]: - pipeline_obj.context['data_stream_total'] = data_stream_total - pipeline_obj.context['download_stream_current'] = download_stream_current - pipeline_obj.context['upload_stream_current'] = upload_stream_current + if hasattr(pipeline_obj, 'context'): + pipeline_obj.context['data_stream_total'] = data_stream_total + pipeline_obj.context['download_stream_current'] = download_stream_current + pipeline_obj.context['upload_stream_current'] = upload_stream_current if response_callback: response_callback(response) request.context['response_callback'] = response_callback @@ -303,11 +334,14 @@ class StorageContentValidation(SansIOHTTPPolicy): """ header_name = 'Content-MD5' - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs: Any) -> None: # pylint: disable=unused-argument super(StorageContentValidation, self).__init__() @staticmethod def get_content_md5(data): + # Since HTTP does not differentiate between no content and empty content, + # we have to perform a None check. 
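As an aside, the None-guard added here matters because an empty HTTP body can surface as None. A minimal, self-contained sketch of the same checksum pattern (the helper name `content_md5` is illustrative, not the vendored API):

```python
import hashlib
from base64 import b64encode
from io import SEEK_SET, BytesIO

def content_md5(data):
    # HTTP does not distinguish "no content" from "empty content": treat None as b"".
    data = data or b""
    md5 = hashlib.md5()  # nosec - transport integrity check, not a security control
    if isinstance(data, bytes):
        md5.update(data)
    elif hasattr(data, "read"):
        pos = data.tell()
        for chunk in iter(lambda: data.read(4096), b""):
            md5.update(chunk)
        data.seek(pos, SEEK_SET)  # restore the caller's position, as the policy does
    else:
        raise ValueError("Data should be bytes or a seekable file-like object.")
    return b64encode(md5.digest()).decode()

assert content_md5(None) == content_md5(b"")            # the None-guard in action
assert content_md5(BytesIO(b"abc")) == content_md5(b"abc")
```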
+ data = data or b"" md5 = hashlib.md5() # nosec if isinstance(data, bytes): md5.update(data) @@ -321,15 +355,14 @@ def get_content_md5(data): md5.update(chunk) try: data.seek(pos, SEEK_SET) - except (AttributeError, IOError): - raise ValueError("Data should be bytes or a seekable file-like object.") + except (AttributeError, IOError) as exc: + raise ValueError("Data should be bytes or a seekable file-like object.") from exc else: raise ValueError("Data should be bytes or a seekable file-like object.") return md5.digest() - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def on_request(self, request: "PipelineRequest") -> None: validate_content = request.context.options.pop('validate_content', False) if validate_content and request.http_request.method != 'GET': computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data)) @@ -337,14 +370,14 @@ def on_request(self, request): request.context['validate_content_md5'] = computed_md5 request.context['validate_content'] = validate_content - def on_response(self, request, response): + def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None: if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): computed_md5 = request.context.get('validate_content_md5') or \ encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) if response.http_response.headers['content-md5'] != computed_md5: - raise AzureError( - 'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format( - response.http_response.headers['content-md5'], computed_md5), + raise AzureError(( + f"MD5 mismatch. Expected value is '{response.http_response.headers['content-md5']}', " + f"computed value is '{computed_md5}'."), response=response.http_response ) @@ -354,7 +387,18 @@ class StorageRetryPolicy(HTTPPolicy): The base class for Exponential and Linear retries containing shared code. """ - def __init__(self, **kwargs): + total_retries: int + """The max number of retries.""" + connect_retries: int + """The max number of connect retries.""" + retry_read: int + """The max number of read retries.""" + retry_status: int + """The max number of status retries.""" + retry_to_secondary: bool + """Whether the secondary endpoint should be retried.""" + + def __init__(self, **kwargs: Any) -> None: self.total_retries = kwargs.pop('retry_total', 10) self.connect_retries = kwargs.pop('retry_connect', 3) self.read_retries = kwargs.pop('retry_read', 3) @@ -362,13 +406,12 @@ def __init__(self, **kwargs): self.retry_to_secondary = kwargs.pop('retry_to_secondary', False) super(StorageRetryPolicy, self).__init__() - def _set_next_host_location(self, settings, request): # pylint: disable=no-self-use + def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRequest") -> None: """ A function which sets the next host location on the request, if applicable. - :param ~azure.storage.models.RetryContext context: - The retry context containing the previous host location and the request - to evaluate and possibly modify. + :param Dict[str, Any]] settings: The configurable values pertaining to the next host location. + :param PipelineRequest request: A pipeline request object. 
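What `_set_next_host_location` boils down to is swapping the URL's netloc for the host registered under the target location mode. A hedged sketch with made-up host values (`set_host_location` is an illustrative name):

```python
from urllib.parse import urlparse

def set_host_location(url: str, hosts: dict, mode: str) -> str:
    # Rewrite only the netloc; scheme, path and query are preserved.
    parsed = urlparse(url)
    return parsed._replace(netloc=hosts[mode]).geturl()

hosts = {
    "primary": "account.blob.core.windows.net",
    "secondary": "account-secondary.blob.core.windows.net",
}
url = "https://account.blob.core.windows.net/container/blob?comp=metadata"
print(set_host_location(url, hosts, "secondary"))
# https://account-secondary.blob.core.windows.net/container/blob?comp=metadata
```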
""" if settings['hosts'] and all(settings['hosts'].values()): url = urlparse(request.url) @@ -380,7 +423,7 @@ def _set_next_host_location(self, settings, request): # pylint: disable=no-self updated = url._replace(netloc=settings['hosts'].get(settings['mode'])) request.url = updated.geturl() - def configure_retries(self, request): # pylint: disable=no-self-use + def configure_retries(self, request: "PipelineRequest") -> Dict[str, Any]: body_position = None if hasattr(request.http_request.body, 'read'): try: @@ -403,10 +446,12 @@ def configure_retries(self, request): # pylint: disable=no-self-use 'history': [] } - def get_backoff_time(self, settings): # pylint: disable=unused-argument,no-self-use + def get_backoff_time(self, settings: Dict[str, Any]) -> float: # pylint: disable=unused-argument """ Formula for computing the current backoff. Should be calculated by child class. + :param Dict[str, Any] settings: The configurable values pertaining to the backoff time. + :returns: The backoff time. :rtype: float """ return 0 @@ -417,14 +462,21 @@ def sleep(self, settings, transport): return transport.sleep(backoff) - def increment(self, settings, request, response=None, error=None): + def increment( + self, settings: Dict[str, Any], + request: "PipelineRequest", + response: Optional["PipelineResponse"] = None, + error: Optional[AzureError] = None + ) -> bool: """Increment the retry counters. - :param response: A pipeline response object. - :param error: An error encountered during the request, or + :param Dict[str, Any] settings: The configurable values pertaining to the increment operation. + :param PipelineRequest request: A pipeline request object. + :param Optional[PipelineResponse] response: A pipeline response object. + :param Optional[AzureError] error: An error encountered during the request, or None if the response was received successfully. - - :return: Whether the retry attempts are exhausted. + :returns: Whether the retry attempts are exhausted. + :rtype: bool """ settings['total'] -= 1 @@ -473,7 +525,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, retry_settings['mode']) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, @@ -488,6 +540,8 @@ def send(self, request): continue break except AzureError as err: + if isinstance(err, AzureSigningError): + raise retries_remaining = self.increment( retry_settings, request=request.http_request, error=err) if retries_remaining: @@ -508,21 +562,33 @@ def send(self, request): class ExponentialRetry(StorageRetryPolicy): """Exponential retry.""" - def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, - retry_to_secondary=False, random_jitter_range=3, **kwargs): - ''' + initial_backoff: int + """The initial backoff interval, in seconds, for the first retry.""" + increment_base: int + """The base, in seconds, to increment the initial_backoff by after the + first retry.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, initial_backoff: int = 15, + increment_base: int = 3, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, + **kwargs: Any + ) -> None: + """ Constructs an Exponential retry object. The initial_backoff is used for the first retry. 
Subsequent retries are retried after initial_backoff + - increment_power^retry_count seconds. For example, by default the first retry - occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the - third after (15+3^2) = 24 seconds. + increment_power^retry_count seconds. :param int initial_backoff: The initial backoff interval, in seconds, for the first retry. :param int increment_base: The base, in seconds, to increment the initial_backoff by after the first retry. - :param int max_attempts: + :param int retry_total: The maximum number of retry attempts. :param bool retry_to_secondary: Whether the request should be retried to secondary, if able. This should @@ -531,21 +597,22 @@ def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, :param int random_jitter_range: A number in seconds which indicates a range to jitter/randomize for the back-off interval. For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. - ''' + """ self.initial_backoff = initial_backoff self.increment_base = increment_base self.random_jitter_range = random_jitter_range super(ExponentialRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. - :return: - An integer indicating how long to wait before retrying the request, + :param Dict[str, Any]] settings: The configurable values pertaining to get backoff time. + :returns: + A float indicating how long to wait before retrying the request, or None to indicate no retry should be performed. - :rtype: int or None + :rtype: float """ random_generator = random.Random() backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count'])) @@ -557,13 +624,24 @@ def get_backoff_time(self, settings): class LinearRetry(StorageRetryPolicy): """Linear retry.""" - def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs): + initial_backoff: int + """The backoff interval, in seconds, between retries.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, backoff: int = 15, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, + **kwargs: Any + ) -> None: """ Constructs a Linear retry object. :param int backoff: The backoff interval, in seconds, between retries. - :param int max_attempts: + :param int retry_total: The maximum number of retry attempts. :param bool retry_to_secondary: Whether the request should be retried to secondary, if able. This should @@ -578,14 +656,15 @@ def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_j super(LinearRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. - :return: - An integer indicating how long to wait before retrying the request, + :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time. + :returns: + A float indicating how long to wait before retrying the request, or None to indicate no retry should be performed. 
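For comparison, the linear flavor keeps the center fixed and only jitters around it; a short sketch consistent with `LinearRetry.get_backoff_time`:

```python
import random

def linear_backoff(backoff=15, jitter=3):
    # The interval normally does not change; the jitter just spreads retries out.
    lo = backoff - jitter if backoff > jitter else 0
    return random.uniform(lo, backoff + jitter)

print(f"every retry: ~{linear_backoff():.1f}s (12-18s with the defaults)")
```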
- :rtype: int or None + :rtype: float """ random_generator = random.Random() # the backoff interval normally does not change, however there is the possibility @@ -594,3 +673,22 @@ def get_backoff_time(self, settings): if self.backoff > self.random_jitter_range else 0 random_range_end = self.backoff + self.random_jitter_range return random_generator.uniform(random_range_start, random_range_end) + + +class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy): + """ Custom Bearer token credential policy for following Storage Bearer challenges """ + + def __init__(self, credential: "TokenCredential", audience: str, **kwargs: Any) -> None: + super(StorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs) + + def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: + try: + auth_header = response.http_response.headers.get("WWW-Authenticate") + challenge = StorageHttpChallenge(auth_header) + except ValueError: + return False + + scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE + self.authorize_request(request, scope, tenant_id=challenge.tenant_id) + + return True diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies_async.py index e0926b81dbc5..b8574a19f1ed 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/policies_async.py @@ -6,17 +6,23 @@ # pylint: disable=invalid-overridden-method import asyncio -import random import logging -from typing import Any, TYPE_CHECKING +import random +from typing import Any, Dict, TYPE_CHECKING -from azure.core.pipeline.policies import AsyncHTTPPolicy -from azure.core.exceptions import AzureError +from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError +from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy -from .policies import is_retry, StorageRetryPolicy +from .authentication import AzureSigningError, StorageHttpChallenge +from .constants import DEFAULT_OAUTH_SCOPE +from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy if TYPE_CHECKING: - from azure.core.pipeline import PipelineRequest, PipelineResponse + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import + PipelineRequest, + PipelineResponse + ) _LOGGER = logging.getLogger(__name__) @@ -36,28 +42,49 @@ async def retry_hook(settings, **kwargs): **kwargs) +async def is_checksum_retry(response): + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + class 
AsyncStorageResponseHook(AsyncHTTPPolicy): - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs): self._response_callback = kwargs.get('raw_response_hook') super(AsyncStorageResponseHook, self).__init__() - async def send(self, request): - # type: (PipelineRequest) -> PipelineResponse - data_stream_total = request.context.get('data_stream_total') or \ - request.context.options.pop('data_stream_total', None) - download_stream_current = request.context.get('download_stream_current') or \ - request.context.options.pop('download_stream_current', None) - upload_stream_current = request.context.get('upload_stream_current') or \ - request.context.options.pop('upload_stream_current', None) + async def send(self, request: "PipelineRequest") -> "PipelineResponse": + # Values could be 0 + data_stream_total = request.context.get('data_stream_total') + if data_stream_total is None: + data_stream_total = request.context.options.pop('data_stream_total', None) + download_stream_current = request.context.get('download_stream_current') + if download_stream_current is None: + download_stream_current = request.context.options.pop('download_stream_current', None) + upload_stream_current = request.context.get('upload_stream_current') + if upload_stream_current is None: + upload_stream_current = request.context.options.pop('upload_stream_current', None) + response_callback = request.context.get('response_callback') or \ request.context.options.pop('raw_response_hook', self._response_callback) response = await self.next.send(request) - await response.http_response.load_body() + will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) + + # Auth error could come from Bearer challenge, in which case this request will be made again + is_auth_error = response.http_response.status_code == 401 + should_update_counts = not (will_retry or is_auth_error) - will_retry = is_retry(response, request.context.options.get('mode')) - if not will_retry and download_stream_current is not None: + if should_update_counts and download_stream_current is not None: download_stream_current += int(response.http_response.headers.get('Content-Length', 0)) if data_stream_total is None: content_range = response.http_response.headers.get('Content-Range') @@ -65,15 +92,16 @@ async def send(self, request): data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1]) else: data_stream_total = download_stream_current - elif not will_retry and upload_stream_current is not None: + elif should_update_counts and upload_stream_current is not None: upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) for pipeline_obj in [request, response]: - pipeline_obj.context['data_stream_total'] = data_stream_total - pipeline_obj.context['download_stream_current'] = download_stream_current - pipeline_obj.context['upload_stream_current'] = upload_stream_current + if hasattr(pipeline_obj, 'context'): + pipeline_obj.context['data_stream_total'] = data_stream_total + pipeline_obj.context['download_stream_current'] = download_stream_current + pipeline_obj.context['upload_stream_current'] = upload_stream_current if response_callback: if asyncio.iscoroutine(response_callback): - await response_callback(response) + await response_callback(response) # type: ignore else: response_callback(response) request.context['response_callback'] = response_callback @@ -97,7 +125,7 @@ async def send(self, request): while retries_remaining: try: response 
= await self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, @@ -112,6 +140,8 @@ async def send(self, request): continue break except AzureError as err: + if isinstance(err, AzureSigningError): + raise retries_remaining = self.increment( retry_settings, request=request.http_request, error=err) if retries_remaining: @@ -132,9 +162,23 @@ async def send(self, request): class ExponentialRetry(AsyncStorageRetryPolicy): """Exponential retry.""" - def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, - retry_to_secondary=False, random_jitter_range=3, **kwargs): - ''' + initial_backoff: int + """The initial backoff interval, in seconds, for the first retry.""" + increment_base: int + """The base, in seconds, to increment the initial_backoff by after the + first retry.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, + initial_backoff: int = 15, + increment_base: int = 3, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, **kwargs + ) -> None: + """ Constructs an Exponential retry object. The initial_backoff is used for the first retry. Subsequent retries are retried after initial_backoff + increment_power^retry_count seconds. For example, by default the first retry @@ -155,17 +199,18 @@ def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, :param int random_jitter_range: A number in seconds which indicates a range to jitter/randomize for the back-off interval. For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. - ''' + """ self.initial_backoff = initial_backoff self.increment_base = increment_base self.random_jitter_range = random_jitter_range super(ExponentialRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. + :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time. :return: An integer indicating how long to wait before retrying the request, or None to indicate no retry should be performed. @@ -181,7 +226,18 @@ def get_backoff_time(self, settings): class LinearRetry(AsyncStorageRetryPolicy): """Linear retry.""" - def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs): + initial_backoff: int + """The backoff interval, in seconds, between retries.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, backoff: int = 15, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, + **kwargs: Any + ) -> None: """ Constructs a Linear retry object. @@ -202,10 +258,11 @@ def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_j super(LinearRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. 
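Both the sync and async bearer policies introduced in this patch follow the same challenge flow: parse the WWW-Authenticate header, build a scope from the challenge's resource id plus the default OAuth suffix, and re-authorize the request. A rough sketch of the parsing step; the header layout and the '/.default' suffix are my assumptions here, not taken from this diff:

```python
def parse_bearer_challenge(header: str) -> dict:
    # e.g. 'Bearer authorization_uri=https://login.microsoftonline.com/tenant-id,
    #       resource_id=https://storage.azure.com'
    scheme, _, params = header.partition(" ")
    if scheme.lower() != "bearer" or not params:
        raise ValueError("not a Bearer challenge")
    pairs = (part.strip().partition("=") for part in params.split(","))
    return {key: value for key, _, value in pairs}

challenge = parse_bearer_challenge(
    "Bearer authorization_uri=https://login.microsoftonline.com/tenant-id, "
    "resource_id=https://storage.azure.com"
)
scope = challenge["resource_id"] + "/.default"  # assumed value of DEFAULT_OAUTH_SCOPE
print(scope)  # https://storage.azure.com/.default
```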
+ :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time. :return: An integer indicating how long to wait before retrying the request, or None to indicate no retry should be performed. @@ -218,3 +275,22 @@ def get_backoff_time(self, settings): if self.backoff > self.random_jitter_range else 0 random_range_end = self.backoff + self.random_jitter_range return random_generator.uniform(random_range_start, random_range_end) + + +class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy): + """ Custom Bearer token credential policy for following Storage Bearer challenges """ + + def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None: + super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs) + + async def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: + try: + auth_header = response.http_response.headers.get("WWW-Authenticate") + challenge = StorageHttpChallenge(auth_header) + except ValueError: + return False + + scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE + await self.authorize_request(request, scope, tenant_id=challenge.tenant_id) + + return True diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/request_handlers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/request_handlers.py index 4f15b65a4b6d..54927cc73979 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/request_handlers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/request_handlers.py @@ -4,22 +4,21 @@ # license information. # -------------------------------------------------------------------------- -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, Type, Tuple, - TYPE_CHECKING -) - import logging -from os import fstat +import stat from io import (SEEK_END, SEEK_SET, UnsupportedOperation) +from os import fstat +from typing import Dict, Optional import isodate -from azure.core.exceptions import raise_with_traceback - _LOGGER = logging.getLogger(__name__) +_REQUEST_DELIMITER_PREFIX = "batch_" +_HTTP1_1_IDENTIFIER = "HTTP/1.1" +_HTTP_LINE_ENDING = "\r\n" + def serialize_iso(attr): """Serialize Datetime object into ISO-8601 formatted string. @@ -37,17 +36,12 @@ def serialize_iso(attr): if utc.tm_year > 9999 or utc.tm_year < 1: raise OverflowError("Hit max or min date") - date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( - utc.tm_year, utc.tm_mon, utc.tm_mday, - utc.tm_hour, utc.tm_min, utc.tm_sec) + date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}" return date + 'Z' except (ValueError, OverflowError) as err: - msg = "Unable to serialize datetime object." - raise_with_traceback(ValueError, msg, err) + raise ValueError("Unable to serialize datetime object.") from err except AttributeError as err: - msg = "ISO-8601 object must be valid Datetime object." 
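`serialize_iso` above normalizes to a UTC time tuple and emits a 'Z'-suffixed timestamp. A minimal equivalent sketch without the vendored isodate dependency (`to_iso_z` is an illustrative name):

```python
from datetime import datetime, timezone

def to_iso_z(dt: datetime) -> str:
    # utctimetuple() converts aware datetimes to UTC; naive ones are taken as UTC already.
    utc = dt.utctimetuple()
    if utc.tm_year > 9999 or utc.tm_year < 1:
        raise OverflowError("Hit max or min date")
    return (f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}"
            f"T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}Z")

print(to_iso_z(datetime(2024, 12, 20, 18, 5, 52, tzinfo=timezone.utc)))
# 2024-12-20T18:05:52Z
```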
- raise_with_traceback(TypeError, msg, err) - + raise TypeError("ISO-8601 object must be valid datetime object.") from err def get_length(data): length = None @@ -66,7 +60,11 @@ def get_length(data): pass else: try: - return fstat(fileno).st_size + mode = fstat(fileno).st_mode + if stat.S_ISREG(mode) or stat.S_ISLNK(mode): + #st_size only meaningful if regular file or symlink, other types + # e.g. sockets may return misleading sizes like 0 + return fstat(fileno).st_size except OSError: # Not a valid fileno, may be possible requests returned # a socket number? @@ -78,7 +76,7 @@ def get_length(data): data.seek(0, SEEK_END) length = data.tell() - current_position data.seek(current_position, SEEK_SET) - except (AttributeError, UnsupportedOperation): + except (AttributeError, OSError, UnsupportedOperation): pass return length @@ -113,24 +111,24 @@ def validate_and_format_range_headers( # Page ranges must be 512 aligned if align_to_page: if start_range is not None and start_range % 512 != 0: - raise ValueError("Invalid page blob start_range: {0}. " - "The size must be aligned to a 512-byte boundary.".format(start_range)) + raise ValueError(f"Invalid page blob start_range: {start_range}. " + "The size must be aligned to a 512-byte boundary.") if end_range is not None and end_range % 512 != 511: - raise ValueError("Invalid page blob end_range: {0}. " - "The size must be aligned to a 512-byte boundary.".format(end_range)) + raise ValueError(f"Invalid page blob end_range: {end_range}. " + "The size must be aligned to a 512-byte boundary.") # Format based on whether end_range is present range_header = None if end_range is not None: - range_header = 'bytes={0}-{1}'.format(start_range, end_range) + range_header = f'bytes={start_range}-{end_range}' elif start_range is not None: - range_header = "bytes={0}-".format(start_range) + range_header = f"bytes={start_range}-" # Content MD5 can only be provided for a complete range less than 4MB in size range_validation = None if check_content_md5: if start_range is None or end_range is None: - raise ValueError("Both start and end range requied for MD5 content validation.") + raise ValueError("Both start and end range required for MD5 content validation.") if end_range - start_range > 4 * 1024 * 1024: raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.") range_validation = 'true' @@ -143,5 +141,130 @@ def add_metadata_headers(metadata=None): headers = {} if metadata: for key, value in metadata.items(): - headers['x-ms-meta-{}'.format(key.strip())] = value.strip() if value else value + headers[f'x-ms-meta-{key.strip()}'] = value.strip() if value else value return headers + + +def serialize_batch_body(requests, batch_id): + """ + -- + + -- + (repeated as needed) + ---- + + Serializes the requests in this batch to a single HTTP mixed/multipart body. + + :param List[~azure.core.pipeline.transport.HttpRequest] requests: + a list of sub-request for the batch request + :param str batch_id: + to be embedded in batch sub-request delimiter + :returns: The body bytes for this batch. 
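A hedged sketch of the multipart framing `serialize_batch_body` produces: each sub-request sits after a `--batch_<id>` delimiter, and the closing delimiter gets trailing dashes plus a final CRLF. The sub-request payloads below are placeholders:

```python
import uuid

_PREFIX, _CRLF = "batch_", "\r\n"

def delimiter(batch_id, prepend=False, append=False):
    return ("--" if prepend else "") + _PREFIX + batch_id + ("--" if append else "")

batch_id = str(uuid.uuid4())
sub_requests = [b"DELETE /container/blob0 HTTP/1.1 ...", b"DELETE /container/blob1 HTTP/1.1 ..."]

body = b""
for sub in sub_requests:
    body += (delimiter(batch_id, prepend=True) + _CRLF).encode() + sub + _CRLF.encode()
# The closing delimiter must end with CRLF or the service will not parse the batch.
body += (delimiter(batch_id, prepend=True, append=True) + _CRLF).encode()
print(body.decode())
```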
+ :rtype: bytes + """ + + if requests is None or len(requests) == 0: + raise ValueError('Please provide sub-request(s) for this batch request') + + delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8') + newline_bytes = _HTTP_LINE_ENDING.encode('utf-8') + batch_body = [] + + content_index = 0 + for request in requests: + request.headers.update({ + "Content-ID": str(content_index), + "Content-Length": str(0) + }) + batch_body.append(delimiter_bytes) + batch_body.append(_make_body_from_sub_request(request)) + batch_body.append(newline_bytes) + content_index += 1 + + batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8')) + # final line of body MUST have \r\n at the end, or it will not be properly read by the service + batch_body.append(newline_bytes) + + return b"".join(batch_body) + + +def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False): + """ + Gets the delimiter used for this batch request's mixed/multipart HTTP format. + + :param str batch_id: + Randomly generated id + :param bool is_prepend_dashes: + Whether to include the starting dashes. Used in the body, but non on defining the delimiter. + :param bool is_append_dashes: + Whether to include the ending dashes. Used in the body on the closing delimiter only. + :returns: The delimiter, WITHOUT a trailing newline. + :rtype: str + """ + + prepend_dashes = '--' if is_prepend_dashes else '' + append_dashes = '--' if is_append_dashes else '' + + return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes + + +def _make_body_from_sub_request(sub_request): + """ + Content-Type: application/http + Content-ID: + Content-Transfer-Encoding: (if present) + + HTTP/ +
<header-name>: <header-value>
(repeated as necessary) + Content-Length: + (newline if content length > 0) + (if content length > 0) + + Serializes an http request. + + :param ~azure.core.pipeline.transport.HttpRequest sub_request: + Request to serialize. + :returns: The serialized sub-request in bytes + :rtype: bytes + """ + + # put the sub-request's headers into a list for efficient str concatenation + sub_request_body = [] + + # get headers for ease of manipulation; remove headers as they are used + headers = sub_request.headers + + # append opening headers + sub_request_body.append("Content-Type: application/http") + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-ID: ") + sub_request_body.append(headers.pop("Content-ID", "")) + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-Transfer-Encoding: binary") + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + # append HTTP verb and path and query and HTTP version + sub_request_body.append(sub_request.method) + sub_request_body.append(' ') + sub_request_body.append(sub_request.url) + sub_request_body.append(' ') + sub_request_body.append(_HTTP1_1_IDENTIFIER) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append remaining headers (this will set the Content-Length, as it was set on `sub-request`) + for header_name, header_value in headers.items(): + if header_value is not None: + sub_request_body.append(header_name) + sub_request_body.append(": ") + sub_request_body.append(header_value) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + return ''.join(sub_request_body).encode() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/response_handlers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/response_handlers.py index 4b591dd7877c..af9a2fcdcdc2 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/response_handlers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/response_handlers.py @@ -3,29 +3,23 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
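Tying together `_make_body_from_sub_request` above: each sub-request serializes as fixed envelope headers, a blank line, the HTTP/1.1 request line, the remaining headers, and a terminating blank line. A simplified, runnable sketch (helper names are illustrative):

```python
CRLF = "\r\n"

def make_sub_request(method, url, headers, content_id):
    lines = [
        "Content-Type: application/http",
        f"Content-ID: {content_id}",
        "Content-Transfer-Encoding: binary",
        "",                                 # blank line before the request line
        f"{method} {url} HTTP/1.1",
    ]
    lines += [f"{k}: {v}" for k, v in headers.items() if v is not None]
    lines.append("")                        # blank line terminates the headers
    return (CRLF.join(lines) + CRLF).encode()

print(make_sub_request("DELETE", "/container/blob1", {"Content-Length": "0"}, 0).decode())
```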
# -------------------------------------------------------------------------- - -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, Type, Tuple, - TYPE_CHECKING -) import logging +from typing import NoReturn +from xml.etree.ElementTree import Element -from azure.core.pipeline.policies import ContentDecodePolicy from azure.core.exceptions import ( + ClientAuthenticationError, + DecodeError, HttpResponseError, - ResourceNotFoundError, - ResourceModifiedError, ResourceExistsError, - ClientAuthenticationError, - DecodeError) + ResourceModifiedError, + ResourceNotFoundError, +) +from azure.core.pipeline.policies import ContentDecodePolicy +from .authentication import AzureSigningError +from .models import get_enum_value, StorageErrorCode, UserDelegationKey from .parser import _to_utc_datetime -from .models import StorageErrorCode, UserDelegationKey, get_enum_value - - -if TYPE_CHECKING: - from datetime import datetime - from azure.core.exceptions import AzureError _LOGGER = logging.getLogger(__name__) @@ -44,10 +38,8 @@ def __init__(self, message, response, parts): super(PartialBatchErrorException, self).__init__(message=message, response=response) +# Parses the blob length from the content range header: bytes 1-3/65537 def parse_length_from_content_range(content_range): - ''' - Parses the blob length from the content range header: bytes 1-3/65537 - ''' if content_range is None: return None @@ -67,7 +59,10 @@ def normalize_headers(headers): def deserialize_metadata(response, obj, headers): # pylint: disable=unused-argument - raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.startswith("x-ms-meta-")} + try: + raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith('x-ms-meta-')} + except AttributeError: + raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith('x-ms-meta-')} return {k[10:]: v for k, v in raw_metadata.items()} @@ -83,29 +78,59 @@ def return_context_and_deserialized(response, deserialized, response_headers): return response.http_response.location_mode, deserialized -def process_storage_error(storage_error): - # If storage_error is one of the two then it has already been processed and serialized to the specific exception. - if isinstance(storage_error, (PartialBatchErrorException, ClientAuthenticationError)): - raise storage_error +def return_raw_deserialized(response, *_): + return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME] + + +def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches raise_error = HttpResponseError + serialized = False + if isinstance(storage_error, AzureSigningError): + storage_error.message = storage_error.message + \ + '. This is likely due to an invalid shared key. Please check your shared key and try again.' + if not storage_error.response or storage_error.response.status_code in [200, 204]: + raise storage_error + # If it is one of those three then it has been serialized prior by the generated layer. 
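`process_storage_error` now has to cope with both XML and JSON error bodies. A condensed sketch of just the extraction branch, with a made-up sample body:

```python
from xml.etree.ElementTree import Element, fromstring

def extract_error(error_body):
    # XML: <Error><Code>...</Code><Message>...</Message></Error>
    if isinstance(error_body, Element):
        return {child.tag.lower(): child.text for child in error_body}
    # JSON: {"error": {"code": ..., "message": ...}}
    if isinstance(error_body, dict):
        return error_body.get("error", {})
    # Anything else: keep whatever text we got as the message.
    return {"message": str(error_body)}

xml_body = fromstring(
    "<Error><Code>BlobNotFound</Code>"
    "<Message>The specified blob does not exist.</Message></Error>")
print(extract_error(xml_body))
# {'code': 'BlobNotFound', 'message': 'The specified blob does not exist.'}
```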
+ if isinstance(storage_error, (PartialBatchErrorException, + ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)): + serialized = True error_code = storage_error.response.headers.get('x-ms-error-code') error_message = storage_error.message additional_data = {} + error_dict = {} try: error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response) - if error_body: - for info in error_body.iter(): - if info.tag.lower() == 'code': - error_code = info.text - elif info.tag.lower() == 'message': - error_message = info.text - else: - additional_data[info.tag] = info.text + try: + if error_body is None or len(error_body) == 0: + error_body = storage_error.response.reason + except AttributeError: + error_body = '' + # If it is an XML response + if isinstance(error_body, Element): + error_dict = { + child.tag.lower(): child.text + for child in error_body + } + # If it is a JSON response + elif isinstance(error_body, dict): + error_dict = error_body.get('error', {}) + elif not error_code: + _LOGGER.warning( + 'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body)) + error_dict = {'message': str(error_body)} + + # If we extracted from a Json or XML response + # There is a chance error_dict is just a string + if error_dict and isinstance(error_dict, dict): + error_code = error_dict.get('code') + error_message = error_dict.get('message') + additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}} except DecodeError: pass try: - if error_code: + # This check would be unnecessary if we have already serialized the error + if error_code and not serialized: error_code = StorageErrorCode(error_code) if error_code in [StorageErrorCode.condition_not_met, StorageErrorCode.blob_overwritten]: @@ -137,17 +162,30 @@ def process_storage_error(storage_error): # Got an unknown error code pass + # Error message should include all the error properties try: - error_message += "\nErrorCode:{}".format(error_code.value) + error_message += f"\nErrorCode:{error_code.value}" except AttributeError: - error_message += "\nErrorCode:{}".format(error_code) + error_message += f"\nErrorCode:{error_code}" for name, info in additional_data.items(): - error_message += "\n{}:{}".format(name, info) - - error = raise_error(message=error_message, response=storage_error.response) + error_message += f"\n{name}:{info}" + + # No need to create an instance if it has already been serialized by the generated layer + if serialized: + storage_error.message = error_message + error = storage_error + else: + error = raise_error(message=error_message, response=storage_error.response) + # Ensure these properties are stored in the error instance as well (not just the error message) error.error_code = error_code error.additional_info = additional_data - error.raise_with_traceback() + # error.args is what's surfaced on the traceback - show error message in all cases + error.args = (error.message,) + try: + # `from None` prevents us from double printing the exception (suppresses generated layer error context) + exec("raise error from None") # pylint: disable=exec-used # nosec + except SyntaxError as exc: + raise error from exc def parse_to_internal_user_delegation_key(service_user_delegation_key): diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/shared_access_signature.py 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/shared_access_signature.py index cb0438f1d311..df29222b873e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/shared_access_signature.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/shared_access_signature.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only from datetime import date @@ -10,7 +11,8 @@ from .constants import X_MS_VERSION from . import sign_string, url_quote - +# cspell:ignoreRegExp rsc. +# cspell:ignoreRegExp s..?id class QueryStringConstants(object): SIGNED_SIGNATURE = 'sig' SIGNED_PERMISSION = 'sp' @@ -38,6 +40,7 @@ class QueryStringConstants(object): SIGNED_KEY_EXPIRY = 'ske' SIGNED_KEY_SERVICE = 'sks' SIGNED_KEY_VERSION = 'skv' + SIGNED_ENCRYPTION_SCOPE = 'ses' # for ADLS SIGNED_AUTHORIZED_OID = 'saoid' @@ -74,6 +77,7 @@ def to_list(): QueryStringConstants.SIGNED_KEY_EXPIRY, QueryStringConstants.SIGNED_KEY_SERVICE, QueryStringConstants.SIGNED_KEY_VERSION, + QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, # for ADLS QueryStringConstants.SIGNED_AUTHORIZED_OID, QueryStringConstants.SIGNED_UNAUTHORIZED_OID, @@ -103,13 +107,23 @@ def __init__(self, account_name, account_key, x_ms_version=X_MS_VERSION): self.account_key = account_key self.x_ms_version = x_ms_version - def generate_account(self, services, resource_types, permission, expiry, start=None, - ip=None, protocol=None): + def generate_account( + self, services, + resource_types, + permission, + expiry, + start=None, + ip=None, + protocol=None, + sts_hook=None, + **kwargs + ) -> str: ''' Generates a shared access signature for the account. Use the returned signature with the sas_token parameter of the service or to create a new account object. + :param Any services: The specified services associated with the shared access signature. :param ResourceTypes resource_types: Specifies the resource types that are accessible with the account SAS. You can combine values to provide access to more than one @@ -132,9 +146,8 @@ def generate_account(self, services, resource_types, permission, expiry, start=N :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: datetime or str :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. @@ -145,22 +158,39 @@ def generate_account(self, services, resource_types, permission, expiry, start=N :param str protocol: Specifies the protocol permitted for a request made. The default value is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. + :keyword str encryption_scope: + Optional. If specified, this is the encryption scope to use when sending requests + authorized with this SAS URI. + :param sts_hook: + For debugging purposes only. 
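The SAS helper below builds a newline-joined string-to-sign, signs it, and joins the query pairs into the token; the new `sts_hook` simply exposes that string-to-sign for debugging. A hedged sketch of the signing and token-joining steps, assuming the shared `sign_string` is HMAC-SHA256 over the base64-decoded account key (the demo key is fake):

```python
import base64
import hashlib
import hmac
from urllib.parse import quote

def sign_string(account_key: str, string_to_sign: str) -> str:
    # HMAC-SHA256 over the UTF-8 string-to-sign, keyed by the decoded account key.
    key = base64.b64decode(account_key)
    digest = hmac.new(key, string_to_sign.encode("utf-8"), hashlib.sha256).digest()
    return base64.b64encode(digest).decode()

def get_token(query_dict: dict) -> str:
    # Mirrors _SharedAccessHelper.get_token: '&'-joined, URL-quoted values.
    return "&".join(f"{n}={quote(v)}" for n, v in query_dict.items() if v is not None)

demo_key = base64.b64encode(b"not-a-real-key").decode()
sig = sign_string(demo_key, "demo\nstring\nto\nsign")
print(get_token({"sv": "2025-01-05", "sig": sig}))
```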
If provided, the hook is called with the string to sign + that was used to generate the SAS. + :type sts_hook: Optional[Callable[[str], None]] + :returns: The generated SAS token for the account. + :rtype: str ''' sas = _SharedAccessHelper() sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) sas.add_account(services, resource_types) + sas.add_encryption_scope(**kwargs) sas.add_account_signature(self.account_name, self.account_key) + if sts_hook is not None: + sts_hook(sas.string_to_sign) + return sas.get_token() class _SharedAccessHelper(object): def __init__(self): self.query_dict = {} + self.string_to_sign = "" def _add_query(self, name, val): if val: - self.query_dict[name] = _str(val) if val is not None else None + self.query_dict[name] = str(val) if val is not None else None + + def add_encryption_scope(self, **kwargs): + self._add_query(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, kwargs.pop('encryption_scope', None)) def add_base(self, permission, expiry, start, ip, protocol, x_ms_version): if isinstance(start, date): @@ -211,10 +241,12 @@ def get_value_to_append(query): get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + get_value_to_append(QueryStringConstants.SIGNED_IP) + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + - get_value_to_append(QueryStringConstants.SIGNED_VERSION)) + get_value_to_append(QueryStringConstants.SIGNED_VERSION) + + get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE)) self._add_query(QueryStringConstants.SIGNED_SIGNATURE, sign_string(account_key, string_to_sign)) + self.string_to_sign = string_to_sign - def get_token(self): - return '&'.join(['{0}={1}'.format(n, url_quote(v)) for n, v in self.query_dict.items() if v is not None]) + def get_token(self) -> str: + return '&'.join([f'{n}={url_quote(v)}' for n, v in self.query_dict.items() if v is not None]) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads.py index acdc16f01ff7..b31cfb3291d9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads.py @@ -3,22 +3,18 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use from concurrent import futures -from io import (BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation) -from threading import Lock +from io import BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation from itertools import islice from math import ceil - -import six +from threading import Lock from azure.core.tracing.common import with_current_context -from . 
import encode_base64, url_quote +from .import encode_base64, url_quote from .request_handlers import get_length from .response_handlers import return_response_headers -from .encryption import get_blob_encryptor_and_padder _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 @@ -52,17 +48,9 @@ def upload_data_chunks( max_concurrency=None, stream=None, validate_content=None, - encryption_options=None, + progress_hook=None, **kwargs): - if encryption_options: - encryptor, padder = get_blob_encryptor_and_padder( - encryption_options.get('cek'), - encryption_options.get('vector'), - uploader_class is not PageBlobChunkUploader) - kwargs['encryptor'] = encryptor - kwargs['padder'] = padder - parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: # Access conditions do not work with parallelism @@ -75,15 +63,16 @@ def upload_data_chunks( stream=stream, parallel=parallel, validate_content=validate_content, + progress_hook=progress_hook, **kwargs) if parallel: - executor = futures.ThreadPoolExecutor(max_concurrency) - upload_tasks = uploader.get_chunk_streams() - running_futures = [ - executor.submit(with_current_context(uploader.process_chunk), u) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures) + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_chunk_streams() + running_futures = [ + executor.submit(with_current_context(uploader.process_chunk), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures) else: range_ids = [uploader.process_chunk(result) for result in uploader.get_chunk_streams()] if any(range_ids): @@ -98,6 +87,7 @@ def upload_substream_blocks( chunk_size=None, max_concurrency=None, stream=None, + progress_hook=None, **kwargs): parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: @@ -109,24 +99,36 @@ def upload_substream_blocks( chunk_size=chunk_size, stream=stream, parallel=parallel, + progress_hook=progress_hook, **kwargs) if parallel: - executor = futures.ThreadPoolExecutor(max_concurrency) - upload_tasks = uploader.get_substream_blocks() - running_futures = [ - executor.submit(with_current_context(uploader.process_substream_block), u) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_substream_blocks() + running_futures = [ + executor.submit(with_current_context(uploader.process_substream_block), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) else: range_ids = [uploader.process_substream_block(b) for b in uploader.get_substream_blocks()] - return sorted(range_ids) + if any(range_ids): + return sorted(range_ids) + return [] class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes - def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs): + def __init__( + self, service, + total_size, + chunk_size, + stream, + parallel, + encryptor=None, + padder=None, + progress_hook=None, + **kwargs): self.service = service self.total_size = total_size self.chunk_size = chunk_size @@ -134,12 
+136,12 @@ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor= self.parallel = parallel # Stream management - self.stream_start = stream.tell() if parallel else None self.stream_lock = Lock() if parallel else None # Progress feedback self.progress_total = 0 self.progress_lock = Lock() if parallel else None + self.progress_hook = progress_hook # Encryption self.encryptor = encryptor @@ -160,7 +162,7 @@ def get_chunk_streams(self): if self.total_size: read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) temp = self.stream.read(read_size) - if not isinstance(temp, six.binary_type): + if not isinstance(temp, bytes): raise TypeError("Blob data should be of type bytes.") data += temp or b"" @@ -197,6 +199,9 @@ def _update_progress(self, length): else: self.progress_total += length + if self.progress_hook: + self.progress_hook(self.progress_total, self.total_size) + def _upload_chunk(self, chunk_offset, chunk_data): raise NotImplementedError("Must be implemented by child class.") @@ -221,16 +226,16 @@ def get_substream_blocks(self): for i in range(blocks): index = i * self.chunk_size length = last_block_size if i == blocks - 1 else self.chunk_size - yield ('BlockId{}'.format("%05d" % i), SubStream(self.stream, index, length, lock)) + yield index, SubStream(self.stream, index, length, lock) def process_substream_block(self, block_data): return self._upload_substream_block_with_progress(block_data[0], block_data[1]) - def _upload_substream_block(self, block_id, block_stream): + def _upload_substream_block(self, index, block_stream): raise NotImplementedError("Must be implemented by child class.") - def _upload_substream_block_with_progress(self, block_id, block_stream): - range_id = self._upload_substream_block(block_id, block_stream) + def _upload_substream_block_with_progress(self, index, block_stream): + range_id = self._upload_substream_block(index, block_stream) self._update_progress(len(block_stream)) return range_id @@ -248,7 +253,7 @@ def __init__(self, *args, **kwargs): def _upload_chunk(self, chunk_offset, chunk_data): # TODO: This is incorrect, but works with recording. 
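The chunk uploader derives block ids two ways: offset-based ids are zero-padded to a fixed width (so every id has the same encoded length, which the service requires) and then double-base64/url-quoted, while substream ids use the new `BlockId{n:05}` scheme. An illustrative sketch of both:

```python
from base64 import b64encode
from urllib.parse import quote

def chunk_block_id(chunk_offset: int) -> str:
    # Fixed-width index keeps every id the same encoded length.
    index = f"{chunk_offset:032d}"
    return b64encode(quote(b64encode(index.encode()).decode()).encode()).decode()

def substream_block_id(index: int, chunk_size: int) -> str:
    return f"BlockId{index // chunk_size:05}"  # e.g. 'BlockId00003' for the fourth block

print(chunk_block_id(4 * 1024 * 1024))
print(substream_block_id(3 * 4 * 1024 * 1024, 4 * 1024 * 1024))  # BlockId00003
```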
- index = '{0:032d}'.format(chunk_offset) + index = f'{chunk_offset:032d}' block_id = encode_base64(url_quote(encode_base64(index))) self.service.stage_block( block_id, @@ -260,8 +265,9 @@ def _upload_chunk(self, chunk_offset, chunk_data): ) return index, block_id - def _upload_substream_block(self, block_id, block_stream): + def _upload_substream_block(self, index, block_stream): try: + block_id = f'BlockId{(index//self.chunk_size):05}' self.service.stage_block( block_id, len(block_stream), @@ -275,7 +281,7 @@ def _upload_substream_block(self, block_id, block_stream): return block_id -class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class PageBlobChunkUploader(_ChunkUploader): def _is_chunk_empty(self, chunk_data): # read until non-zero byte is encountered @@ -286,7 +292,7 @@ def _upload_chunk(self, chunk_offset, chunk_data): # avoid uploading the empty pages if not self._is_chunk_empty(chunk_data): chunk_end = chunk_offset + len(chunk_data) - 1 - content_range = "bytes={0}-{1}".format(chunk_offset, chunk_end) + content_range = f"bytes={chunk_offset}-{chunk_end}" computed_md5 = None self.response_headers = self.service.upload_pages( body=chunk_data, @@ -302,8 +308,11 @@ def _upload_chunk(self, chunk_offset, chunk_data): if not self.parallel and self.request_options.get('modified_access_conditions'): self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + def _upload_substream_block(self, index, block_stream): + pass + -class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class AppendBlobChunkUploader(_ChunkUploader): def __init__(self, *args, **kwargs): super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) @@ -332,8 +341,43 @@ def _upload_chunk(self, chunk_offset, chunk_data): **self.request_options ) + def _upload_substream_block(self, index, block_stream): + pass + + +class DataLakeFileChunkUploader(_ChunkUploader): + + def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + self.response_headers = self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + def _upload_substream_block(self, index, block_stream): + try: + self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + -class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class FileChunkUploader(_ChunkUploader): def _upload_chunk(self, chunk_offset, chunk_data): length = len(chunk_data) @@ -346,7 +390,11 @@ def _upload_chunk(self, chunk_offset, chunk_data): upload_stream_current=self.progress_total, **self.request_options ) - return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response + return f'bytes={chunk_offset}-{chunk_end}', response + + # TODO: Implement this method. 
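The SubStream class that follows is essentially a bounded, seekable window over a shared base stream, guarded by a lock so parallel uploaders don't interleave reads. A minimal sketch of that idea (the `Window` class here is illustrative, not the vendored implementation):

```python
import threading
from io import SEEK_SET, BytesIO

class Window:
    def __init__(self, base, begin, length, lock):
        self._base, self._begin, self._len, self._lock = base, begin, length, lock
        self._pos = 0  # position relative to the window start

    def read(self, size=-1):
        size = self._len - self._pos if size < 0 else min(size, self._len - self._pos)
        with self._lock:
            # Always seek before reading: another window may have moved the base stream.
            self._base.seek(self._begin + self._pos, SEEK_SET)
            data = self._base.read(size)
        self._pos += len(data)
        return data

base = BytesIO(b"0123456789abcdef")
lock = threading.Lock()
print(Window(base, 4, 4, lock).read())  # b'4567'
print(Window(base, 8, 4, lock).read())  # b'89ab'
```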
+ def _upload_substream_block(self, index, block_stream): + pass class SubStream(IOBase): @@ -358,8 +406,8 @@ def __init__(self, wrapped_stream, stream_begin_index, length, lockObj): try: # only the main thread runs this, so there's no need grabbing the lock wrapped_stream.seek(0, SEEK_CUR) - except: - raise ValueError("Wrapped stream must support seek().") + except Exception as exc: + raise ValueError("Wrapped stream must support seek().") from exc self._lock = lockObj self._wrapped_stream = wrapped_stream @@ -432,6 +480,13 @@ def read(self, size=None): raise IOError("Stream failed to seek to the desired location.") buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) else: + absolute_position = self._stream_begin_index + self._position + # It's possible that there's connection problem during data transfer, + # so when we retry we don't want to read from current position of wrapped stream, + # instead we should seek to where we want to read from. + if self._wrapped_stream.tell() != absolute_position: + self._wrapped_stream.seek(absolute_position, SEEK_SET) + buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) if buffer_from_stream: @@ -523,13 +578,11 @@ def seekable(self): def __next__(self): return next(self.iterator) - next = __next__ # Python 2 compatibility. - def tell(self, *args, **kwargs): raise UnsupportedOperation("Data generator does not support tell.") def seek(self, *args, **kwargs): - raise UnsupportedOperation("Data generator is unseekable.") + raise UnsupportedOperation("Data generator is not seekable.") def read(self, size): data = self.leftover @@ -537,14 +590,15 @@ def read(self, size): try: while count < size: chunk = self.__next__() - if isinstance(chunk, six.text_type): + if isinstance(chunk, str): chunk = chunk.encode(self.encoding) data += chunk count += len(chunk) + # This means count < size and what's leftover will be returned in this call. except StopIteration: - pass + self.leftover = b"" - if count > size: + if count >= size: self.leftover = data[size:] return data[:size] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads_async.py index e598165330ae..3e102ec5daef 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared/uploads_async.py @@ -3,26 +3,40 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use import asyncio +import inspect +import threading from asyncio import Lock +from io import UnsupportedOperation from itertools import islice -import threading - from math import ceil +from typing import AsyncGenerator, Union -import six - -from . 
import encode_base64, url_quote +from .import encode_base64, url_quote from .request_handlers import get_length from .response_handlers import return_response_headers -from .encryption import get_blob_encryptor_and_padder from .uploads import SubStream, IterStreamer # pylint: disable=unused-import -_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 -_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.' +async def _async_parallel_uploads(uploader, pending, running): + range_ids = [] + while True: + # Wait for some download to finish before adding a new one + done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED) + range_ids.extend([chunk.result() for chunk in done]) + try: + for _ in range(0, len(done)): + next_chunk = await pending.__anext__() + running.add(asyncio.ensure_future(uploader(next_chunk))) + except StopAsyncIteration: + break + + # Wait for the remaining uploads to finish + if running: + done, _running = await asyncio.wait(running) + range_ids.extend([chunk.result() for chunk in done]) + return range_ids async def _parallel_uploads(uploader, pending, running): @@ -52,17 +66,9 @@ async def upload_data_chunks( chunk_size=None, max_concurrency=None, stream=None, - encryption_options=None, + progress_hook=None, **kwargs): - if encryption_options: - encryptor, padder = get_blob_encryptor_and_padder( - encryption_options.get('cek'), - encryption_options.get('vector'), - uploader_class is not PageBlobChunkUploader) - kwargs['encryptor'] = encryptor - kwargs['padder'] = padder - parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: # Access conditions do not work with parallelism @@ -74,18 +80,23 @@ async def upload_data_chunks( chunk_size=chunk_size, stream=stream, parallel=parallel, + progress_hook=progress_hook, **kwargs) if parallel: upload_tasks = uploader.get_chunk_streams() - running_futures = [ - asyncio.ensure_future(uploader.process_chunk(u)) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = await _parallel_uploads(uploader.process_chunk, upload_tasks, running_futures) + running_futures = [] + for _ in range(max_concurrency): + try: + chunk = await upload_tasks.__anext__() + running_futures.append(asyncio.ensure_future(uploader.process_chunk(chunk))) + except StopAsyncIteration: + break + + range_ids = await _async_parallel_uploads(uploader.process_chunk, upload_tasks, running_futures) else: range_ids = [] - for chunk in uploader.get_chunk_streams(): + async for chunk in uploader.get_chunk_streams(): range_ids.append(await uploader.process_chunk(chunk)) if any(range_ids): @@ -100,6 +111,7 @@ async def upload_substream_blocks( chunk_size=None, max_concurrency=None, stream=None, + progress_hook=None, **kwargs): parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: @@ -111,6 +123,7 @@ async def upload_substream_blocks( chunk_size=chunk_size, stream=stream, parallel=parallel, + progress_hook=progress_hook, **kwargs) if parallel: @@ -124,12 +137,23 @@ async def upload_substream_blocks( range_ids = [] for block in uploader.get_substream_blocks(): range_ids.append(await uploader.process_substream_block(block)) - return sorted(range_ids) + if any(range_ids): + return sorted(range_ids) + return class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes - def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs): + def __init__( + self, service, + 
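# _async_parallel_uploads above caps in-flight uploads at max_concurrency by
# awaiting asyncio.wait(..., return_when=FIRST_COMPLETED), harvesting the
# finished futures, and refilling the running set from the async generator
# until StopAsyncIteration. The core of that pattern, reduced to a sketch
# (hypothetical worker/pending/results names):
#
#     done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED)
#     results.extend(f.result() for f in done)
#     running.add(asyncio.ensure_future(worker(await pending.__anext__())))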
total_size, + chunk_size, + stream, + parallel, + encryptor=None, + padder=None, + progress_hook=None, + **kwargs): self.service = service self.total_size = total_size self.chunk_size = chunk_size @@ -137,12 +161,12 @@ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor= self.parallel = parallel # Stream management - self.stream_start = stream.tell() if parallel else None self.stream_lock = threading.Lock() if parallel else None # Progress feedback self.progress_total = 0 self.progress_lock = Lock() if parallel else None + self.progress_hook = progress_hook # Encryption self.encryptor = encryptor @@ -152,7 +176,7 @@ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor= self.last_modified = None self.request_options = kwargs - def get_chunk_streams(self): + async def get_chunk_streams(self): index = 0 while True: data = b'' @@ -163,7 +187,9 @@ def get_chunk_streams(self): if self.total_size: read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) temp = self.stream.read(read_size) - if not isinstance(temp, six.binary_type): + if inspect.isawaitable(temp): + temp = await temp + if not isinstance(temp, bytes): raise TypeError('Blob data should be of type bytes.') data += temp or b"" @@ -200,6 +226,9 @@ async def _update_progress(self, length): else: self.progress_total += length + if self.progress_hook: + await self.progress_hook(self.progress_total, self.total_size) + async def _upload_chunk(self, chunk_offset, chunk_data): raise NotImplementedError("Must be implemented by child class.") @@ -224,16 +253,16 @@ def get_substream_blocks(self): for i in range(blocks): index = i * self.chunk_size length = last_block_size if i == blocks - 1 else self.chunk_size - yield ('BlockId{}'.format("%05d" % i), SubStream(self.stream, index, length, lock)) + yield index, SubStream(self.stream, index, length, lock) async def process_substream_block(self, block_data): return await self._upload_substream_block_with_progress(block_data[0], block_data[1]) - async def _upload_substream_block(self, block_id, block_stream): + async def _upload_substream_block(self, index, block_stream): raise NotImplementedError("Must be implemented by child class.") - async def _upload_substream_block_with_progress(self, block_id, block_stream): - range_id = await self._upload_substream_block(block_id, block_stream) + async def _upload_substream_block_with_progress(self, index, block_stream): + range_id = await self._upload_substream_block(index, block_stream) await self._update_progress(len(block_stream)) return range_id @@ -251,19 +280,20 @@ def __init__(self, *args, **kwargs): async def _upload_chunk(self, chunk_offset, chunk_data): # TODO: This is incorrect, but works with recording. 
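# The async get_chunk_streams above supports both sync and async streams by
# probing what read() returns. The same pattern in isolation, as a minimal
# sketch (hypothetical helper; intended for file objects and aiofiles-style
# streams whose read() may return an awaitable):
#
#     import inspect
#
#     async def read_some(stream, n):
#         data = stream.read(n)
#         if inspect.isawaitable(data):
#             data = await data
#         return data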
- index = '{0:032d}'.format(chunk_offset) + index = f'{chunk_offset:032d}' block_id = encode_base64(url_quote(encode_base64(index))) await self.service.stage_block( block_id, len(chunk_data), - chunk_data, + body=chunk_data, data_stream_total=self.total_size, upload_stream_current=self.progress_total, **self.request_options) return index, block_id - async def _upload_substream_block(self, block_id, block_stream): + async def _upload_substream_block(self, index, block_stream): try: + block_id = f'BlockId{(index//self.chunk_size):05}' await self.service.stage_block( block_id, len(block_stream), @@ -276,7 +306,7 @@ async def _upload_substream_block(self, block_id, block_stream): return block_id -class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class PageBlobChunkUploader(_ChunkUploader): def _is_chunk_empty(self, chunk_data): # read until non-zero byte is encountered @@ -290,7 +320,7 @@ async def _upload_chunk(self, chunk_offset, chunk_data): # avoid uploading the empty pages if not self._is_chunk_empty(chunk_data): chunk_end = chunk_offset + len(chunk_data) - 1 - content_range = 'bytes={0}-{1}'.format(chunk_offset, chunk_end) + content_range = f'bytes={chunk_offset}-{chunk_end}' computed_md5 = None self.response_headers = await self.service.upload_pages( body=chunk_data, @@ -305,8 +335,11 @@ async def _upload_chunk(self, chunk_offset, chunk_data): if not self.parallel and self.request_options.get('modified_access_conditions'): self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + async def _upload_substream_block(self, index, block_stream): + pass + -class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class AppendBlobChunkUploader(_ChunkUploader): def __init__(self, *args, **kwargs): super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) @@ -333,18 +366,95 @@ async def _upload_chunk(self, chunk_offset, chunk_data): upload_stream_current=self.progress_total, **self.request_options) + async def _upload_substream_block(self, index, block_stream): + pass -class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + +class DataLakeFileChunkUploader(_ChunkUploader): async def _upload_chunk(self, chunk_offset, chunk_data): - chunk_end = chunk_offset + len(chunk_data) - 1 + self.response_headers = await self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + async def _upload_substream_block(self, index, block_stream): + try: + await self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + + +class FileChunkUploader(_ChunkUploader): + + async def _upload_chunk(self, chunk_offset, chunk_data): + length = len(chunk_data) + chunk_end = chunk_offset + length - 1 response = await self.service.upload_range( chunk_data, chunk_offset, - chunk_end, + length, data_stream_total=self.total_size, upload_stream_current=self.progress_total, **self.request_options ) - range_id = 
'bytes={0}-{1}'.format(chunk_offset, chunk_end) + range_id = f'bytes={chunk_offset}-{chunk_end}' return range_id, response + + # TODO: Implement this method. + async def _upload_substream_block(self, index, block_stream): + pass + + +class AsyncIterStreamer(): + """ + File-like streaming object for AsyncGenerators. + """ + def __init__(self, generator: AsyncGenerator[Union[bytes, str], None], encoding: str = "UTF-8"): + self.iterator = generator.__aiter__() + self.leftover = b"" + self.encoding = encoding + + def seekable(self): + return False + + def tell(self, *args, **kwargs): + raise UnsupportedOperation("Data generator does not support tell.") + + def seek(self, *args, **kwargs): + raise UnsupportedOperation("Data generator is not seekable.") + + async def read(self, size: int) -> bytes: + data = self.leftover + count = len(self.leftover) + try: + while count < size: + chunk = await self.iterator.__anext__() + if isinstance(chunk, str): + chunk = chunk.encode(self.encoding) + data += chunk + count += len(chunk) + # This means count < size and what's leftover will be returned in this call. + except StopAsyncIteration: + self.leftover = b"" + + if count >= size: + self.leftover = data[size:] + + return data[:size] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared_access_signature.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared_access_signature.py index 890ef1b2eeac..a3005be2ac64 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared_access_signature.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_shared_access_signature.py @@ -3,26 +3,22 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, TYPE_CHECKING +from typing import ( + Any, Callable, Optional, Union, + TYPE_CHECKING ) +from urllib.parse import parse_qs from ._shared import sign_string, url_quote from ._shared.constants import X_MS_VERSION -from ._shared.models import Services -from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper, \ - QueryStringConstants +from ._shared.models import Services, UserDelegationKey +from ._shared.shared_access_signature import QueryStringConstants, SharedAccessSignature, _SharedAccessHelper if TYPE_CHECKING: from datetime import datetime - from ..blob import ( - ResourceTypes, - AccountSasPermissions, - UserDelegationKey, - ContainerSasPermissions, - BlobSasPermissions - ) + from ..blob import AccountSasPermissions, BlobSasPermissions, ContainerSasPermissions, ResourceTypes class BlobQueryStringConstants(object): @@ -37,13 +33,17 @@ class BlobSharedAccessSignature(SharedAccessSignature): generate_*_shared_access_signature method directly. 
''' - def __init__(self, account_name, account_key=None, user_delegation_key=None): + def __init__( + self, account_name: str, + account_key: Optional[str] = None, + user_delegation_key: Optional[UserDelegationKey] = None + ) -> None: ''' :param str account_name: The storage account name used to generate the shared access signatures. - :param str account_key: + :param Optional[str] account_key: The access key to generate the shares access signatures. - :param ~azure.storage.blob.models.UserDelegationKey user_delegation_key: + :param Optional[~azure.storage.blob.models.UserDelegationKey] user_delegation_key: Instead of an account key, the user could pass in a user delegation key. A user delegation key can be obtained from the service by authenticating with an AAD identity; this can be accomplished by calling get_user_delegation_key on any Blob service object. @@ -51,11 +51,25 @@ def __init__(self, account_name, account_key=None, user_delegation_key=None): super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION) self.user_delegation_key = user_delegation_key - def generate_blob(self, container_name, blob_name, snapshot=None, version_id=None, permission=None, - expiry=None, start=None, policy_id=None, ip=None, protocol=None, - cache_control=None, content_disposition=None, - content_encoding=None, content_language=None, - content_type=None, **kwargs): + def generate_blob( + self, container_name: str, + blob_name: str, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + permission: Optional[Union["BlobSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + protocol: Optional[str] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_type: Optional[str] = None, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any + ) -> str: ''' Generates a shared access signature for the blob or one of its snapshots. Use the returned signature with the sas_token parameter of any BlobService. @@ -65,15 +79,21 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non :param str blob_name: Name of blob. :param str snapshot: - The snapshot parameter is an opaque DateTime value that, + The snapshot parameter is an opaque datetime value that, when present, specifies the blob snapshot to grant permission. - :param BlobSasPermissions permission: + :param str version_id: + An optional blob version ID. This parameter is only applicable for versioning-enabled + Storage accounts. Note that the 'versionid' query parameter is not included in the output + SAS. Therefore, please provide the 'version_id' parameter to any APIs when using the output + SAS to operate on a specific version. + :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxytmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. + :type permission: str or BlobSasPermissions :param expiry: The time at which the shared access signature becomes invalid. 
Required unless an id is given referencing a stored access policy @@ -85,9 +105,8 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -117,6 +136,12 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :param sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :type sts_hook: Optional[Callable[[str], None]] + :return: A Shared Access Signature (sas) token. + :rtype: str ''' resource_path = container_name + '/' + blob_name @@ -133,30 +158,46 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non sas.add_override_response_headers(cache_control, content_disposition, content_encoding, content_language, content_type) + sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) sas.add_resource_signature(self.account_name, self.account_key, resource_path, user_delegation_key=self.user_delegation_key) + if sts_hook is not None: + sts_hook(sas.string_to_sign) + return sas.get_token() - def generate_container(self, container_name, permission=None, expiry=None, - start=None, policy_id=None, ip=None, protocol=None, - cache_control=None, content_disposition=None, - content_encoding=None, content_language=None, - content_type=None, **kwargs): + def generate_container( + self, container_name: str, + permission: Optional[Union["ContainerSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + protocol: Optional[str] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_type: Optional[str] = None, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any + ) -> str: ''' Generates a shared access signature for the container. Use the returned signature with the sas_token parameter of any BlobService. :param str container_name: Name of container. - :param ContainerSasPermissions permission: + :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxyltfmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. + :type permission: str or ContainerSasPermissions :param expiry: The time at which the shared access signature becomes invalid. 
Required unless an id is given referencing a stored access policy @@ -168,9 +209,8 @@ def generate_container(self, container_name, permission=None, expiry=None, :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -200,6 +240,12 @@ def generate_container(self, container_name, permission=None, expiry=None, :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :param sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :type sts_hook: Optional[Callable[[str], None]] + :return: A Shared Access Signature (sas) token. + :rtype: str ''' sas = _BlobSharedAccessHelper() sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) @@ -208,9 +254,14 @@ def generate_container(self, container_name, permission=None, expiry=None, sas.add_override_response_headers(cache_control, content_disposition, content_encoding, content_language, content_type) + sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) sas.add_resource_signature(self.account_name, self.account_key, container_name, user_delegation_key=self.user_delegation_key) + + if sts_hook is not None: + sts_hook(sas.string_to_sign) + return sas.get_token() @@ -230,7 +281,6 @@ def get_value_to_append(self, query): return return_value + '\n' def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None): - # pylint: disable = no-member if path[0] != '/': path = '/' + path @@ -271,6 +321,7 @@ def add_resource_signature(self, account_name, account_key, path, user_delegatio self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) + self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) + self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) + + self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + @@ -284,25 +335,29 @@ def add_resource_signature(self, account_name, account_key, path, user_delegatio self._add_query(QueryStringConstants.SIGNED_SIGNATURE, sign_string(account_key if user_delegation_key is None else user_delegation_key.value, string_to_sign)) + self.string_to_sign = string_to_sign - def get_token(self): + def get_token(self) -> str: # a conscious decision was made to exclude the timestamp in the generated token # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP] - return '&'.join(['{0}={1}'.format(n, url_quote(v)) + return '&'.join([f'{n}={url_quote(v)}' for n, v in self.query_dict.items() if v is not None and n not in exclude]) def generate_account_sas( - account_name, # type: str - account_key, # type: str - resource_types, # type: Union[ResourceTypes, str] - permission, # type: 
Union[AccountSasPermissions, str] - expiry, # type: Optional[Union[datetime, str]] - start=None, # type: Optional[Union[datetime, str]] - ip=None, # type: Optional[str] - **kwargs # type: Any - ): # type: (...) -> str + account_name: str, + account_key: str, + resource_types: Union["ResourceTypes", str], + permission: Union["AccountSasPermissions", str], + expiry: Union["datetime", str], + start: Optional[Union["datetime", str]] = None, + ip: Optional[str] = None, + *, + services: Union[Services, str] = Services(blob=True), + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any +) -> str: """Generates a shared access signature for the blob service. Use the returned signature with the credential parameter of any BlobServiceClient, @@ -318,24 +373,16 @@ def generate_account_sas( :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. :type permission: str or ~azure.storage.blob.AccountSasPermissions :param expiry: The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. + The provided datetime will always be interpreted as UTC. :type expiry: ~datetime.datetime or str :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: ~datetime.datetime or str :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. @@ -343,8 +390,17 @@ def generate_account_sas( or address range specified on the SAS token, the request is not authenticated. For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. + :keyword Union[Services, str] services: + Specifies the services that the Shared Access Signature (sas) token will be able to be utilized with. + Will default to only this package (i.e. blobs) if not provided. :keyword str protocol: Specifies the protocol permitted for a request made. The default value is https. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :keyword sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :paramtype sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. 
:rtype: str @@ -359,29 +415,31 @@ def generate_account_sas( """ sas = SharedAccessSignature(account_name, account_key) return sas.generate_account( - services=Services(blob=True), + services=services, resource_types=resource_types, permission=permission, expiry=expiry, start=start, ip=ip, + sts_hook=sts_hook, **kwargs - ) # type: ignore + ) def generate_container_sas( - account_name, # type: str - container_name, # type: str - account_key=None, # type: Optional[str] - user_delegation_key=None, # type: Optional[UserDelegationKey] - permission=None, # type: Optional[Union[ContainerSasPermissions, str]] - expiry=None, # type: Optional[Union[datetime, str]] - start=None, # type: Optional[Union[datetime, str]] - policy_id=None, # type: Optional[str] - ip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Any + account_name: str, + container_name: str, + account_key: Optional[str] = None, + user_delegation_key: Optional[UserDelegationKey] = None, + permission: Optional[Union["ContainerSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + *, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any +) -> str: """Generates a shared access signature for a container. Use the returned signature with the credential parameter of any BlobServiceClient, @@ -402,7 +460,7 @@ def generate_container_sas( :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxyltfmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. @@ -418,9 +476,8 @@ def generate_container_sas( :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: ~datetime.datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -449,6 +506,15 @@ def generate_container_sas( :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :keyword str correlation_id: + The correlation id to correlate the storage audit logs with the audit logs used by the principal + generating and distributing the SAS. This can only be used when generating a SAS with delegation key. + :keyword sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :paramtype sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. :rtype: str @@ -461,9 +527,15 @@ def generate_container_sas( :dedent: 12 :caption: Generating a sas token. 
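        A minimal inline sketch of the new keyword-only parameters
        (hypothetical account and container names; this vendored module
        defines the function, and the public azure-storage-blob package
        exposes an equivalent helper):

            from datetime import datetime, timedelta, timezone

            sas_token = generate_container_sas(
                account_name="myaccount",        # hypothetical
                container_name="mycontainer",    # hypothetical
                account_key="<account-key>",
                permission="racwdl",             # respects the racwdxyltfmei ordering
                expiry=datetime.now(timezone.utc) + timedelta(hours=1),
                sts_hook=print,                  # debugging only: prints the string-to-sign
            )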
""" + if not policy_id: + if not expiry: + raise ValueError("'expiry' parameter must be provided when not using a stored access policy.") + if not permission: + raise ValueError("'permission' parameter must be provided when not using a stored access policy.") if not user_delegation_key and not account_key: raise ValueError("Either user_delegation_key or account_key must be provided.") - + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key if user_delegation_key: sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) else: @@ -475,25 +547,27 @@ def generate_container_sas( start=start, policy_id=policy_id, ip=ip, + sts_hook=sts_hook, **kwargs ) def generate_blob_sas( - account_name, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[str] - account_key=None, # type: Optional[str] - user_delegation_key=None, # type: Optional[UserDelegationKey] - permission=None, # type: Optional[Union[BlobSasPermissions, str]] - expiry=None, # type: Optional[Union[datetime, str]] - start=None, # type: Optional[Union[datetime, str]] - policy_id=None, # type: Optional[str] - ip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Any + account_name: str, + container_name: str, + blob_name: str, + snapshot: Optional[str] = None, + account_key: Optional[str] = None, + user_delegation_key: Optional[UserDelegationKey] = None, + permission: Optional[Union["BlobSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + *, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any +) -> str: """Generates a shared access signature for a blob. Use the returned signature with the credential parameter of any BlobServiceClient, @@ -518,7 +592,7 @@ def generate_blob_sas( :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxytmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. @@ -534,9 +608,8 @@ def generate_blob_sas( :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: ~datetime.datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -549,7 +622,10 @@ def generate_blob_sas( For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :keyword str version_id: - An optional blob version ID. This parameter is only for versioning enabled account + An optional blob version ID. This parameter is only applicable for versioning-enabled + Storage accounts. Note that the 'versionid' query parameter is not included in the output + SAS. Therefore, please provide the 'version_id' parameter to any APIs when using the output + SAS to operate on a specific version. .. 
versionadded:: 12.4.0 This keyword argument was introduced in API version '2019-12-12'. @@ -570,11 +646,27 @@ def generate_blob_sas( :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :keyword str correlation_id: + The correlation id to correlate the storage audit logs with the audit logs used by the principal + generating and distributing the SAS. This can only be used when generating a SAS with delegation key. + :keyword sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :paramtype sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. :rtype: str """ + if not policy_id: + if not expiry: + raise ValueError("'expiry' parameter must be provided when not using a stored access policy.") + if not permission: + raise ValueError("'permission' parameter must be provided when not using a stored access policy.") if not user_delegation_key and not account_key: raise ValueError("Either user_delegation_key or account_key must be provided.") + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key version_id = kwargs.pop('version_id', None) if version_id and snapshot: raise ValueError("snapshot and version_id cannot be set at the same time.") @@ -592,5 +684,16 @@ def generate_blob_sas( start=start, policy_id=policy_id, ip=ip, + sts_hook=sts_hook, **kwargs ) + +def _is_credential_sastoken(credential: Any) -> bool: + if not credential or not isinstance(credential, str): + return False + + sas_values = QueryStringConstants.to_list() + parsed_query = parse_qs(credential.lstrip("?")) + if parsed_query and all(k in sas_values for k in parsed_query): + return True + return False diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_upload_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_upload_helpers.py index 94313f635e43..2ce55f7ab237 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_upload_helpers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_upload_helpers.py @@ -3,33 +3,39 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
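# The _is_credential_sastoken helper added at the bottom of the previous file
# classifies a string credential as a SAS token only when every query
# parameter is a recognized SAS field. The same check as a standalone sketch
# (the sample token value is hypothetical):
#
#     from urllib.parse import parse_qs
#
#     params = parse_qs("sv=2025-01-05&se=2025-02-01&sig=abc".lstrip("?"))
#     is_sas = bool(params) and all(k in QueryStringConstants.to_list() for k in params)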
# -------------------------------------------------------------------------- -# pylint: disable=no-self-use from io import SEEK_SET, UnsupportedOperation -from typing import Optional, Union, Any, TypeVar, TYPE_CHECKING # pylint: disable=unused-import +from typing import Any, cast, Dict, IO, Optional, TypeVar, TYPE_CHECKING -import six from azure.core.exceptions import ResourceExistsError, ResourceModifiedError, HttpResponseError -from ._shared.response_handlers import ( - process_storage_error, - return_response_headers) +from ._encryption import ( + _ENCRYPTION_PROTOCOL_V1, + _ENCRYPTION_PROTOCOL_V2, + encrypt_blob, + GCMBlobEncryptionStream, + generate_blob_encryption_data, + get_adjusted_upload_size, + get_blob_encryptor_and_padder +) +from ._generated.models import ( + AppendPositionAccessConditions, + BlockLookupList, + ModifiedAccessConditions +) from ._shared.models import StorageErrorCode +from ._shared.response_handlers import process_storage_error, return_response_headers from ._shared.uploads import ( - upload_data_chunks, - upload_substream_blocks, + AppendBlobChunkUploader, BlockBlobChunkUploader, PageBlobChunkUploader, - AppendBlobChunkUploader) -from ._shared.encryption import generate_blob_encryption_data, encrypt_blob -from ._generated.models import ( - BlockLookupList, - AppendPositionAccessConditions, - ModifiedAccessConditions, + upload_data_chunks, + upload_substream_blocks ) if TYPE_CHECKING: - from datetime import datetime # pylint: disable=unused-import + from ._generated.operations import AppendBlobOperations, BlockBlobOperations, PageBlobOperations + from ._shared.models import StorageConfiguration BlobLeaseClient = TypeVar("BlobLeaseClient") _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 @@ -58,41 +64,46 @@ def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disab ]) -def upload_block_blob( # pylint: disable=too-many-locals - client=None, - data=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): +def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements + client: "BlockBlobOperations", + stream: IO, + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + validate_content: bool, + max_concurrency: Optional[int], + length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' adjusted_count = length if (encryption_options.get('key') is not None) and (adjusted_count is not None): - adjusted_count += (16 - (length % 16)) + adjusted_count = get_adjusted_upload_size(adjusted_count, encryption_options['version']) blob_headers = kwargs.pop('blob_headers', None) tier = kwargs.pop('standard_blob_tier', None) blob_tags_string = kwargs.pop('blob_tags_string', None) + immutability_policy = kwargs.pop('immutability_policy', None) + immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time + immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode + legal_hold = kwargs.pop('legal_hold', None) + progress_hook = kwargs.pop('progress_hook', None) + # Do single put if the size is smaller than or equal config.max_single_put_size if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size): - try: - data 
= data.read(length) - if not isinstance(data, six.binary_type): - raise TypeError('Blob data should be of type bytes.') - except AttributeError: - pass + data = stream.read(length or -1) + if not isinstance(data, bytes): + raise TypeError('Blob data should be of type bytes.') + if encryption_options.get('key'): - encryption_data, data = encrypt_blob(data, encryption_options['key']) + encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version']) headers['x-ms-meta-encryptiondata'] = encryption_data - return client.upload( - body=data, + + response = client.upload( + body=data, # type: ignore [arg-type] content_length=adjusted_count, blob_http_headers=blob_headers, headers=headers, @@ -102,8 +113,16 @@ def upload_block_blob( # pylint: disable=too-many-locals upload_stream_current=0, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, **kwargs) + if progress_hook: + progress_hook(adjusted_count, adjusted_count) + + return cast(Dict[str, Any], response) + use_original_upload_path = blob_settings.use_byte_buffer or \ validate_content or encryption_options.get('required') or \ blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \ @@ -111,20 +130,37 @@ def upload_block_blob( # pylint: disable=too-many-locals not hasattr(stream, 'seek') or not hasattr(stream, 'tell') if use_original_upload_path: - if encryption_options.get('key'): - cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key']) - headers['x-ms-meta-encryptiondata'] = encryption_data - encryption_options['cek'] = cek - encryption_options['vector'] = iv + total_size = length + encryptor, padder = None, None + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_metadata = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_metadata + + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, True) + + # Adjust total_size for encryption V2 + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V2: + # Adjust total_size for encryption V2 + total_size = adjusted_count + # V2 wraps the data stream with an encryption stream + if cek is None: + raise ValueError("Generate encryption metadata failed. 
'cek' is None.") + stream = GCMBlobEncryptionStream(cek, stream) # type: ignore [assignment] + block_ids = upload_data_chunks( service=client, uploader_class=BlockBlobChunkUploader, - total_size=length, + total_size=total_size, chunk_size=blob_settings.max_block_size, max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, + encryptor=encryptor, + padder=padder, headers=headers, **kwargs ) @@ -137,13 +173,14 @@ def upload_block_blob( # pylint: disable=too-many-locals max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, + progress_hook=progress_hook, headers=headers, **kwargs ) block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) block_lookup.latest = block_ids - return client.commit_block_list( + return cast(Dict[str, Any], client.commit_block_list( block_lookup, blob_http_headers=blob_headers, cls=return_response_headers, @@ -151,7 +188,10 @@ def upload_block_blob( # pylint: disable=too-many-locals headers=headers, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, - **kwargs) + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs)) except HttpResponseError as error: try: process_storage_error(error) @@ -162,48 +202,63 @@ def upload_block_blob( # pylint: disable=too-many-locals def upload_page_blob( - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "PageBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' if length is None or length < 0: raise ValueError("A content length must be specified for a Page Blob.") if length % 512 != 0: - raise ValueError("Invalid page blob size: {0}. " - "The size must be aligned to a 512-byte boundary.".format(length)) + raise ValueError(f"Invalid page blob size: {length}. 
" + "The size must be aligned to a 512-byte boundary.") + tier = None if kwargs.get('premium_page_blob_tier'): premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') try: - headers['x-ms-access-tier'] = premium_page_blob_tier.value + tier = premium_page_blob_tier.value except AttributeError: - headers['x-ms-access-tier'] = premium_page_blob_tier - if encryption_options and encryption_options.get('data'): - headers['x-ms-meta-encryptiondata'] = encryption_options['data'] + tier = premium_page_blob_tier + + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_data = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_data + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) - response = client.create( + response = cast(Dict[str, Any], client.create( content_length=0, blob_content_length=length, - blob_sequence_number=None, + blob_sequence_number=None, # type: ignore [arg-type] blob_http_headers=kwargs.pop('blob_headers', None), blob_tags_string=blob_tags_string, + tier=tier, cls=return_response_headers, headers=headers, - **kwargs) + **kwargs)) if length == 0: - return response + return cast(Dict[str, Any], response) + + if encryption_options and encryption_options.get('key'): + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, False) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) - return upload_data_chunks( + return cast(Dict[str, Any], upload_data_chunks( service=client, uploader_class=PageBlobChunkUploader, total_size=length, @@ -211,9 +266,9 @@ def upload_page_blob( stream=stream, max_concurrency=max_concurrency, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: try: @@ -225,16 +280,17 @@ def upload_page_blob( def upload_append_blob( # pylint: disable=unused-argument - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "AppendBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if length == 0: return {} @@ -243,6 +299,7 @@ def upload_append_blob( # pylint: disable=unused-argument max_size=kwargs.pop('maxsize_condition', None), append_position=None) blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) try: if overwrite: @@ -252,7 +309,7 @@ def upload_append_blob( # pylint: disable=unused-argument headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return upload_data_chunks( + return cast(Dict[str, Any], upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -261,26 +318,27 @@ def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, append_position_access_conditions=append_conditions, + progress_hook=progress_hook, 
headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: - if error.response.status_code != 404: + if error.response.status_code != 404: # type: ignore [union-attr] raise # rewind the request body if it is a stream if hasattr(stream, 'read'): try: # attempt to rewind the body to the initial position stream.seek(0, SEEK_SET) - except UnsupportedOperation: + except UnsupportedOperation as exc: # if body is not seekable, then retry would not work - raise error + raise error from exc client.create( content_length=0, blob_http_headers=blob_headers, headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return upload_data_chunks( + return cast(Dict[str, Any], upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -289,7 +347,8 @@ def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, append_position_access_conditions=append_conditions, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_version.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_version.py index 8d23bd9195d5..5192aed4c84b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_version.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/_version.py @@ -4,4 +4,4 @@ # license information. # -------------------------------------------------------------------------- -VERSION = "12.7.1" +VERSION = "12.25.0b1" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/__init__.py index 33c10319aaa0..a755e6a2d59b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/__init__.py @@ -3,9 +3,12 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only import os +from typing import Any, AnyStr, Dict, cast, IO, Iterable, Optional, Union, TYPE_CHECKING +from ._list_blobs_helper import BlobPrefix from .._models import BlobType from .._shared.policies_async import ExponentialRetry, LinearRetry from ._blob_client_async import BlobClient @@ -14,13 +17,17 @@ from ._lease_async import BlobLeaseClient from ._download_async import StorageStreamDownloader +if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential + from azure.core.credentials_async import AsyncTokenCredential + async def upload_blob_to_url( - blob_url, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - credential=None, # type: Any - **kwargs): - # type: (...) 
-> dict[str, Any] + blob_url: str, + data: Union[Iterable[AnyStr], IO[AnyStr]], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> Dict[str, Any]: """Upload data to a given URL The data will be uploaded as a block blob. @@ -33,10 +40,17 @@ async def upload_blob_to_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob_to_url will overwrite any existing data. If set to False, the @@ -60,24 +74,27 @@ async def upload_blob_to_url( :keyword str encoding: Encoding to use if text is supplied as input. Defaults to UTF-8. :returns: Blob-updated property dict (Etag and last modified) - :rtype: dict(str, Any) + :rtype: dict[str, Any] """ async with BlobClient.from_blob_url(blob_url, credential=credential) as client: - return await client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs) + return await cast(BlobClient, client).upload_blob( + data=data, + blob_type=BlobType.BLOCKBLOB, + **kwargs) +# Download data to specified open file-handle. async def _download_to_stream(client, handle, **kwargs): - """Download data to specified open file-handle.""" stream = await client.download_blob(**kwargs) await stream.readinto(handle) async def download_blob_from_url( - blob_url, # type: str - output, # type: str - credential=None, # type: Any - **kwargs): - # type: (...) -> None + blob_url: str, + output: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> None: """Download the contents of a blob to a local file or stream. :param str blob_url: @@ -89,10 +106,17 @@ async def download_blob_from_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token or the blob is public. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. 
+ If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the local file should be overwritten if it already exists. The default value is `False` - in which case a ValueError will be raised if the file already exists. If set to @@ -123,7 +147,7 @@ async def download_blob_from_url( await _download_to_stream(client, output, **kwargs) else: if not overwrite and os.path.isfile(output): - raise ValueError("The file '{}' already exists.".format(output)) + raise ValueError(f"The file '{output}' already exists.") with open(output, 'wb') as file_handle: await _download_to_stream(client, file_handle, **kwargs) @@ -132,6 +156,7 @@ async def download_blob_from_url( 'upload_blob_to_url', 'download_blob_from_url', 'BlobServiceClient', + 'BlobPrefix', 'ContainerClient', 'BlobClient', 'BlobLeaseClient', diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_client_async.py index d67271a00411..7cb074487f58 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_client_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_client_async.py @@ -3,45 +3,93 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, invalid-overridden-method +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only + +import warnings +from datetime import datetime from functools import partial -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, +from typing import ( + Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, TYPE_CHECKING ) +from typing_extensions import Self +from azure.core.async_paging import AsyncItemPaged +from azure.core.exceptions import ResourceNotFoundError, HttpResponseError, ResourceExistsError +from azure.core.pipeline import AsyncPipeline +from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.exceptions import ResourceNotFoundError, HttpResponseError -from .._shared.base_client_async import AsyncStorageAccountHostsMixin -from .._shared.policies_async import ExponentialRetry -from .._shared.response_handlers import return_response_headers, process_storage_error -from .._deserialize import get_page_ranges_result, parse_tags, deserialize_pipeline_response_into_cls -from .._serialize import get_modify_conditions, get_api_version, get_access_conditions -from .._generated.aio import AzureBlobStorage -from .._generated.models import CpkInfo -from .._deserialize import deserialize_blob_properties -from .._blob_client import BlobClient as BlobClientBase +from ._download_async import StorageStreamDownloader +from ._lease_async import BlobLeaseClient +from ._models import PageRangePaged from ._upload_helpers import ( - upload_block_blob, upload_append_blob, - upload_page_blob) -from .._models import BlobType, BlobBlock, BlobProperties -from ._lease_async import BlobLeaseClient -from ._download_async import StorageStreamDownloader - + upload_append_blob, + upload_block_blob, + upload_page_blob +) +from .._blob_client import StorageAccountHostsMixin +from .._blob_client_helpers import ( + _abort_copy_options, + _append_block_from_url_options, + _append_block_options, + _clear_page_options, + _commit_block_list_options, + _create_append_blob_options, + _create_page_blob_options, + _create_snapshot_options, + _delete_blob_options, + _download_blob_options, + _format_url, + _from_blob_url, + _get_blob_tags_options, + _get_block_list_result, + _get_page_ranges_options, + _parse_url, + _resize_blob_options, + _seal_append_blob_options, + _set_blob_metadata_options, + _set_blob_tags_options, + _set_http_headers_options, + _set_sequence_number_options, + _stage_block_from_url_options, + _stage_block_options, + _start_copy_from_url_options, + _upload_blob_from_url_options, + _upload_blob_options, + _upload_page_options, + _upload_pages_from_url_options +) +from .._deserialize import ( + deserialize_blob_properties, + deserialize_pipeline_response_into_cls, + get_page_ranges_result, + parse_tags +) +from .._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION +from .._generated.aio import AzureBlobStorage +from .._generated.models import CpkInfo +from .._models import BlobType, BlobBlock, BlobProperties, PageRange +from .._serialize import get_access_conditions, get_api_version, get_modify_conditions, get_version_id +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str +from .._shared.policies_async import
ExponentialRetry +from .._shared.response_handlers import process_storage_error, return_response_headers if TYPE_CHECKING: - from datetime import datetime - from .._models import ( # pylint: disable=unused-import + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.policies import AsyncHTTPPolicy + from azure.storage.blob.aio import ContainerClient + from .._models import ( ContentSettings, + ImmutabilityPolicy, PremiumPageBlobTier, - StandardBlobTier, - SequenceNumberAction + SequenceNumberAction, + StandardBlobTier ) -class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disable=too-many-public-methods +class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin): # type: ignore [misc] # pylint: disable=too-many-public-methods """A client to interact with a specific blob, although that blob may not yet exist. :param str account_url: @@ -58,13 +106,15 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disa :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -83,6 +133,11 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disa the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -101,30 +156,145 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disa :caption: Creating the BlobClient from a SAS URL to a blob. 
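For orientation, a minimal construction sketch matching the constructor parameters documented above. All values are placeholders, and the import uses the public azure.storage.blob.aio package that this vendored module mirrors rather than the vendored path itself:

import asyncio
from azure.storage.blob.aio import BlobClient

async def main():
    # Placeholder account URL, container, blob name, and SAS token.
    client = BlobClient(
        account_url="https://myaccount.blob.core.windows.net",
        container_name="mycontainer",
        blob_name="myblob",
        credential="<sas-token>",  # or an AzureNamedKeyCredential / async token credential
    )
    async with client:
        props = await client.get_blob_properties()
        print(props.name, props.size)

asyncio.run(main())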
""" def __init__( - self, account_url, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[Union[str, Dict[str, Any]]] - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None + self, account_url: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - super(BlobClient, self).__init__( - account_url, + parsed_url, sas_token, path_snapshot = _parse_url( + account_url=account_url, container_name=container_name, - blob_name=blob_name, - snapshot=snapshot, - credential=credential, - **kwargs) - self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - self._loop = kwargs.get('loop', None) + blob_name=blob_name) + self.container_name = container_name + self.blob_name = blob_name + + if snapshot is not None and hasattr(snapshot, 'snapshot'): + self.snapshot = snapshot.snapshot + elif isinstance(snapshot, dict): + self.snapshot = snapshot['snapshot'] + else: + self.snapshot = snapshot or path_snapshot + self.version_id = kwargs.pop('version_id', None) + + # This parameter is used for the hierarchy traversal. Give precedence to credential. + self._raw_credential = credential if credential else sas_token + self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) + super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) + + def _format_url(self, hostname: str) -> str: + return _format_url( + container_name=self.container_name, + scheme=self.scheme, + blob_name=self.blob_name, + query_str=self._query_str, + hostname=hostname + ) + + @classmethod + def from_blob_url( + cls, blob_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> Self: + """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. + + :param str blob_url: + The full endpoint URL to the Blob, including SAS token and snapshot if used. This could be + either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. + :type blob_url: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. 
+ If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] # pylint: disable=line-too-long + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. If specified, this will override + the snapshot in the url. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :returns: A Blob client. + :rtype: ~azure.storage.blob.BlobClient + """ + account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot) + return cls( + account_url, container_name=container_name, blob_name=blob_name, + snapshot=path_snapshot, credential=credential, **kwargs + ) + + @classmethod + def from_connection_string( + cls, conn_str: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create BlobClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param container_name: The container name for the blob. + :type container_name: str + :param blob_name: The name of the blob with which to interact. + :type blob_name: str + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] # pylint: disable=line-too-long + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. 
+ :returns: A Blob client. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string_blob] + :end-before: [END auth_from_connection_string_blob] + :language: python + :dedent: 8 + :caption: Creating the BlobClient from a connection string. + """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls( + account_url, container_name=container_name, blob_name=blob_name, + snapshot=snapshot, credential=credential, **kwargs + ) @distributed_trace_async - async def get_account_information(self, **kwargs): # type: ignore - # type: (Optional[int]) -> Dict[str, str] + async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account in which the blob resides. The information can also be retrieved if the user has a SAS to a container or blob. @@ -134,13 +304,18 @@ async def get_account_information(self, **kwargs): # type: ignore :rtype: dict(str, str) """ try: - return await self._client.blob.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + return cast(Dict[str, str], + await self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def upload_blob_from_url(self, source_url, **kwargs): - # type: (str, Any) -> Dict[str, Any] + async def upload_blob_from_url( + self, source_url: str, + *, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """ Creates a new Block Blob where the content of the blob is read from a given URL. The content of an existing blob is overwritten with the new blob. @@ -148,15 +323,17 @@ async def upload_blob_from_url(self, source_url, **kwargs): :param str source_url: A URL of up to 2 KB in length that specifies a file or blob. The value should be URL-encoded as it would appear in a request URI. - If the source is in another account, the source must either be public - or must be authenticated via a shared access signature. If the source - is public, no authentication is required. + The source must either be public or must be authenticated via a shared + access signature as part of the url or using the source_authorization keyword. + If the source is public, no authentication is required. Examples: https://myaccount.blob.core.windows.net/mycontainer/myblob https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :keyword dict(str, str) metadata: + Name-value pairs associated with the blob as metadata. :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. @@ -167,7 +344,7 @@ async def upload_blob_from_url(self, source_url, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) :paramtype tags: dict(str, str) :keyword bytearray source_content_md5: Specify the md5 that is used to verify the integrity of the source bytes. @@ -211,7 +388,11 @@ async def upload_blob_from_url(self, source_url, **kwargs): valid, the operation fails with status code 412 (Precondition Failed). :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :keyword ~azure.storage.blob.ContentSettings content_settings: ContentSettings object used to set blob properties. Used to set content type, encoding, language, disposition, md5, and cache control. @@ -228,27 +409,35 @@ async def upload_blob_from_url(self, source_url, **kwargs): :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Response from creating a new block blob for a given URL. + :rtype: Dict[str, Any] """ - options = self._upload_blob_from_url_options( - source_url=self._encode_source_url(source_url), + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_from_url_options( + source_url=source_url, + metadata=metadata, **kwargs) try: - return await self._client.block_blob.put_blob_from_url(**options) + return cast(Dict[str, Any], await self._client.block_blob.put_blob_from_url(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async async def upload_blob( - self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Any + self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], + blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, + length: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new blob from a data source with automatic chunking. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -262,7 +451,7 @@ async def upload_blob( The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters.
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -317,6 +506,20 @@ async def upload_blob( A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on premium storage accounts. + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. @@ -327,8 +530,9 @@ value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). :keyword int max_concurrency: - Maximum number of parallel connections to use when the blob size exceeds - 64MB. + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: Encrypts the data on the service-side with the given key. Use of customer-provided keys must be done over HTTPS. @@ -344,8 +548,18 @@ :keyword str encoding: Defaults to UTF-8. + :keyword progress_hook: + An async callback to track the progress of a long running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. :returns: Blob-updated property dict (Etag and last modified) @@ -360,24 +574,62 @@ :dedent: 16 :caption: Upload a blob to the container.
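A short usage sketch of the keyword arguments documented above (overwrite, metadata, max_concurrency, progress_hook). The blob URL and SAS token are placeholders, and the import is from the public azure.storage.blob.aio package rather than this vendored copy:

import asyncio
from typing import Optional
from azure.storage.blob.aio import BlobClient

async def report(current: int, total: Optional[int]) -> None:
    # Matches the progress_hook signature described above.
    print(f"uploaded {current} of {total if total is not None else '?'} bytes")

async def main():
    client = BlobClient.from_blob_url("https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas>")
    async with client:
        await client.upload_blob(
            b"hello, world",
            overwrite=True,                  # replace any existing blob instead of raising
            metadata={"origin": "example"},  # stored with the blob as name-value pairs
            max_concurrency=2,               # parallel connections for chunked transfers
            progress_hook=report,
        )

asyncio.run(main())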
""" - options = self._upload_blob_options( - data, + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_options( + data=data, blob_type=blob_type, length=length, metadata=metadata, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) if blob_type == BlobType.BlockBlob: - return await upload_block_blob(**options) + return cast(Dict[str, Any], await upload_block_blob(**options)) if blob_type == BlobType.PageBlob: - return await upload_page_blob(**options) - return await upload_append_blob(**options) + return cast(Dict[str, Any], await upload_page_blob(**options)) + return cast(Dict[str, Any], await upload_append_blob(**options)) + + @overload + async def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + async def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... @distributed_trace_async - async def download_blob(self, offset=None, length=None, **kwargs): - # type: (Optional[int], Optional[int], Any) -> StorageStreamDownloader + async def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. The readall() method must be used to read all the content or readinto() must be used to download the blob into - a stream. + a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks. :param int offset: Start of byte range to use for downloading a section of the blob. @@ -390,6 +642,7 @@ async def download_blob(self, offset=None, length=None, **kwargs): value that, when present, specifies the version of the blob to download. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword bool validate_content: @@ -435,11 +688,23 @@ async def download_blob(self, offset=None, length=None, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int max_concurrency: - The number of parallel connections with which to download. + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. :keyword str encoding: Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + An async callback to track the progress of a long running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], Awaitable[None]] :keyword int timeout: - The timeout parameter is expressed in seconds. 
This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make + multiple calls to the service and + the timeout will apply to each call individually. :returns: A streaming object (StorageStreamDownloader) @@ -454,17 +719,35 @@ :dedent: 16 :caption: Download a blob. """ - options = self._download_blob_options( + if self.require_encryption and not (self.key_encryption_key or self.key_resolver_function): + raise ValueError("Encryption required but no key was provided.") + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _download_blob_options( + blob_name=self.blob_name, + container_name=self.container_name, + version_id=get_version_id(self.version_id, kwargs), offset=offset, length=length, + encoding=encoding, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) downloader = StorageStreamDownloader(**options) await downloader._setup() # pylint: disable=protected-access return downloader @distributed_trace_async - async def delete_blob(self, delete_snapshots=False, **kwargs): - # type: (str, Any) -> None + async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None: """Marks the specified blob for deletion. The blob is later deleted during garbage collection. @@ -487,6 +770,7 @@ async def delete_blob(self, delete_snapshots=False, **kwargs): value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -518,7 +802,11 @@ .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -530,22 +818,33 @@ :dedent: 16 :caption: Delete a blob. """ - options = self._delete_blob_options(delete_snapshots=delete_snapshots, **kwargs) + options = _delete_blob_options( + snapshot=self.snapshot, + version_id=get_version_id(self.version_id, kwargs), + delete_snapshots=delete_snapshots, + **kwargs) try: await self._client.blob.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def undelete_blob(self, **kwargs): - # type: (Any) -> None + async def undelete_blob(self, **kwargs: Any) -> None: """Restores soft-deleted blobs or snapshots.
Operation will only be successful if used within the specified number of days set in the delete retention policy. + If blob versioning is enabled, the base blob cannot be restored using this + method. Instead use :func:`start_copy_from_url` with the URL of the blob version + you wish to promote to the current version. + :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -563,24 +862,33 @@ process_storage_error(error) @distributed_trace_async - async def exists(self, **kwargs): - # type: (**Any) -> bool + async def exists(self, **kwargs: Any) -> bool: """ Returns True if a blob exists with the defined parameters, and returns False otherwise. - :param str version_id: + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to check if it exists. - :param int timeout: - The timeout parameter is expressed in seconds. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: boolean + :rtype: bool """ + version_id = get_version_id(self.version_id, kwargs) try: await self._client.blob.get_properties( snapshot=self.snapshot, + version_id=version_id, **kwargs) return True + # Encrypted with CPK + except ResourceExistsError: + return True except HttpResponseError as error: try: process_storage_error(error) @@ -588,8 +896,7 @@ return False @distributed_trace_async - async def get_blob_properties(self, **kwargs): - # type: (Any) -> BlobProperties + async def get_blob_properties(self, **kwargs: Any) -> BlobProperties: """Returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -602,6 +909,7 @@ async def get_blob_properties(self, **kwargs): value that, when present, specifies the version of the blob to get properties. .. versionadded:: 12.4.0 This keyword argument was introduced in API version '2019-12-12'. :keyword ~datetime.datetime if_modified_since: @@ -633,7 +941,11 @@ As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:returns: BlobProperties :rtype: ~azure.storage.blob.BlobProperties @@ -648,6 +960,7 @@ """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: @@ -661,7 +974,7 @@ kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) blob_props = await self._client.blob.get_properties( timeout=kwargs.pop('timeout', None), - version_id=kwargs.pop('version_id', None), + version_id=version_id, snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, @@ -674,11 +987,13 @@ if isinstance(blob_props, BlobProperties): blob_props.container = self.container_name blob_props.snapshot = self.snapshot - return blob_props # type: ignore + return cast(BlobProperties, blob_props) @distributed_trace_async - async def set_http_headers(self, content_settings=None, **kwargs): - # type: (Optional[ContentSettings], Any) -> None + async def set_http_headers( + self, content_settings: Optional["ContentSettings"] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Sets system properties on the blob. If one property is set for the content_settings, all properties will be overridden. @@ -714,19 +1029,25 @@ .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = self._set_http_headers_options(content_settings=content_settings, **kwargs) + options = _set_http_headers_options(content_settings=content_settings, **kwargs) try: - return await self._client.blob.set_http_headers(**options) # type: ignore + return cast(Dict[str, Any], await self._client.blob.set_http_headers(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def set_blob_metadata(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] + async def set_blob_metadata( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets user-defined metadata for the blob as one or more name-value pairs. :param metadata: @@ -775,24 +1096,114 @@ .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Union[str, datetime]] """ - options = self._set_blob_metadata_options(metadata=metadata, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _set_blob_metadata_options(metadata=metadata, **kwargs) try: - return await self._client.blob.set_metadata(**options) # type: ignore + return cast(Dict[str, Union[str, datetime]], await self._client.blob.set_metadata(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def create_page_blob( # type: ignore - self, size, # type: int - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + async def set_immutability_policy( + self, immutability_policy: "ImmutabilityPolicy", + **kwargs: Any + ) -> Dict[str, str]: + """The Set Immutability Policy operation sets the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to operate on. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: Response headers from setting the immutability policy. + :rtype: Dict[str, str] + """ + + version_id = get_version_id(self.version_id, kwargs) + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + return cast(Dict[str, str], await self._client.blob.set_immutability_policy( + cls=return_response_headers, version_id=version_id, **kwargs)) + + @distributed_trace_async + async def delete_immutability_policy(self, **kwargs: Any) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to operate on. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: None
+ :rtype: None + """ + + version_id = get_version_id(self.version_id, kwargs) + await self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs) + + @distributed_trace_async + async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: + """The Set Legal Hold operation sets a legal hold on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param bool legal_hold: + Specified if a legal hold should be set on the blob. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to operate on. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: Response headers from setting the legal hold, including the legal hold status. + :rtype: Dict[str, Union[str, datetime, bool]] + """ + + version_id = get_version_id(self.version_id, kwargs) + return cast(Dict[str, Union[str, datetime, bool]], await self._client.blob.set_legal_hold( + legal_hold, version_id=version_id, cls=return_response_headers, **kwargs)) + + @distributed_trace_async + async def create_page_blob( + self, size: int, + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Creates a new Page Blob of the specified size. :param int size: @@ -813,7 +1224,7 @@ The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -826,6 +1237,18 @@ Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -857,25 +1280,38 @@ .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:returns: Blob-updated property dict (Etag and last modified). :rtype: dict[str, Any] """ - options = self._create_page_blob_options( - size, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_page_blob_options( + size=size, content_settings=content_settings, metadata=metadata, premium_page_blob_tier=premium_page_blob_tier, **kwargs) try: - return await self._client.page_blob.create(**options) # type: ignore + return cast(Dict[str, Any], await self._client.page_blob.create(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def create_append_blob(self, content_settings=None, metadata=None, **kwargs): - # type: (Optional[ContentSettings], Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] - """Creates a new Append Blob. + async def create_append_blob( + self, content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: + """Creates a new Append Blob. This operation creates a new 0-length append blob. The content + of any existing blob is overwritten with the newly initialized append blob. To add content to + the append blob, call the :func:`append_block` or :func:`append_block_from_url` method. :param ~azure.storage.blob.ContentSettings content_settings: ContentSettings object used to set blob properties. Used to set content type, encoding, @@ -888,11 +1324,23 @@ async def create_append_blob(self, content_settings=None, metadata=None, **kwarg The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 :paramtype tags: dict(str, str) + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -928,22 +1376,32 @@ async def create_append_blob(self, content_settings=None, metadata=None, **kwarg .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified).
:rtype: dict[str, Any] """ - options = self._create_append_blob_options( + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_append_blob_options( content_settings=content_settings, metadata=metadata, **kwargs) try: - return await self._client.append_blob.create(**options) # type: ignore + return cast(Dict[str, Union[str, datetime]], await self._client.append_blob.create(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def create_snapshot(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] + async def create_snapshot( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any
 ) -> Dict[str, Union[str, datetime]]: """Creates a snapshot of the blob. A snapshot is a read-only version of a blob that's taken at a point in time. @@ -998,7 +1456,11 @@ async def create_snapshot(self, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). :rtype: dict[str, Any] @@ -1011,20 +1473,29 @@ :dedent: 12 :caption: Create a snapshot of the blob. """ - options = self._create_snapshot_options(metadata=metadata, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_snapshot_options(metadata=metadata, **kwargs) try: - return await self._client.blob.create_snapshot(**options) # type: ignore + return cast(Dict[str, Any], await self._client.blob.create_snapshot(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs): - # type: (str, Optional[Dict[str, str]], bool, Any) -> Any - """Copies a blob asynchronously. - - This operation returns a copy operation - object that can be used to wait on the completion of the operation, - as well as check status or abort the copy operation. + async def start_copy_from_url( + self, source_url: str, + metadata: Optional[Dict[str, str]] = None, + incremental_copy: bool = False, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: + """Copies a blob from the given URL. + + This operation returns a dictionary containing `copy_status` and `copy_id`, + which can be used to check the status of or abort the copy operation. + `copy_status` will be 'success' if the copy completed synchronously or + 'pending' if the copy has been started asynchronously. For asynchronous copies, + the status can be checked by polling the :func:`get_blob_properties` method and + checking the copy status. Set `requires_sync` to True to force the copy to be synchronous. The Blob service copies blobs on a best-effort basis.
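In the public azure.storage.blob.aio package, the polling flow described in the paragraph above might look like the following sketch (the URLs and SAS tokens are placeholders, and `props.copy` is the CopyProperties attribute of BlobProperties):

import asyncio
from azure.storage.blob.aio import BlobClient

async def main():
    source = "https://otheraccount.blob.core.windows.net/mycontainer/myblob?<sastoken>"
    dest = BlobClient.from_blob_url("https://myaccount.blob.core.windows.net/mycontainer/mycopy?<sastoken>")
    async with dest:
        copy = await dest.start_copy_from_url(source)
        status = copy['copy_status']
        # 'success' means the copy finished synchronously; 'pending' means it
        # continues server-side and is polled via get_blob_properties().
        while status == 'pending':
            await asyncio.sleep(5)
            props = await dest.get_blob_properties()
            status = props.copy.status
        print(status)

asyncio.run(main())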
The source blob for a copy operation may be a block blob, an append blob, @@ -1047,10 +1518,6 @@ async def start_copy_from_url(self, source_url, metadata=None, incremental_copy= end of the copy operation, the destination blob will have the same committed block count as the source. - For all blob types, you can call status() on the returned polling object - to check the status of the copy operation, or wait() to block until the - operation is complete. The final blob will be committed when the copy completes. - :param str source_url: A URL of up to 2 KB in length that specifies a file or blob. The value should be URL-encoded as it would appear in a request URI. @@ -1081,11 +1548,26 @@ The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_). + + The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob. + This option is only available when `incremental_copy=False` and `requires_sync=True`. .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: dict(str, str) or Literal["COPY"] + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime source_if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1140,7 +1622,11 @@ the lease ID given matches the active lease ID of the source blob. :paramtype source_lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1157,8 +1643,23 @@ :keyword bool requires_sync: Enforces that the service will not return a response until the copy is complete. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. This option is only available when `incremental_copy` is + set to False and `requires_sync` is set to True. + + ..
versionadded:: 12.9.0 + + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.10.0 + :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). - :rtype: dict[str, str or ~datetime.datetime] + :rtype: dict[str, Union[str, ~datetime.datetime]] .. admonition:: Example: @@ -1169,21 +1670,23 @@ :dedent: 16 :caption: Copy a blob from a URL. """ - options = self._start_copy_from_url_options( - source_url=self._encode_source_url(source_url), + options = _start_copy_from_url_options( + source_url=source_url, metadata=metadata, incremental_copy=incremental_copy, **kwargs) try: if incremental_copy: - return await self._client.page_blob.copy_incremental(**options) - return await self._client.blob.start_copy_from_url(**options) + return cast(Dict[str, Union[str, datetime]], await self._client.page_blob.copy_incremental(**options)) + return cast(Dict[str, Union[str, datetime]], await self._client.blob.start_copy_from_url(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def abort_copy(self, copy_id, **kwargs): - # type: (Union[str, Dict[str, Any], BlobProperties], Any) -> None + async def abort_copy( + self, copy_id: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> None: """Abort an ongoing copy operation. This will leave a destination blob with zero length and full metadata. @@ -1204,15 +1707,18 @@ :dedent: 16 :caption: Abort copying a blob from URL. """ - options = self._abort_copy_options(copy_id, **kwargs) + options = _abort_copy_options(copy_id, **kwargs) try: await self._client.blob.abort_copy_from_url(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): - # type: (int, Optional[str], Any) -> BlobLeaseClient + async def acquire_lease( + self, lease_duration: int = -1, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> BlobLeaseClient: """Requests a new lease. If the blob does not have an active lease, the Blob @@ -1251,7 +1757,11 @@ .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: A BlobLeaseClient object. :rtype: ~azure.storage.blob.aio.BlobLeaseClient .. admonition:: Example: .. literalinclude:: ../samples/blob_samples_common_async.py :start-after: [START acquire_lease_on_blob] :end-before: [END acquire_lease_on_blob] :language: python :dedent: 12 :caption: Acquiring a lease on a blob.
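A lease sketch matching the acquire_lease description above. The blob URL and SAS token are placeholders, the import uses the public azure.storage.blob.aio package, and release() on the returned BlobLeaseClient ends the lease early:

import asyncio
from azure.storage.blob.aio import BlobClient

async def main():
    blob = BlobClient.from_blob_url("https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas>")
    async with blob:
        # A 15-second lease; lease_duration=-1 (the default) requests an infinite lease.
        lease = await blob.acquire_lease(lease_duration=15)
        try:
            # Mutating operations must now present the lease.
            await blob.set_blob_metadata({"locked": "true"}, lease=lease)
        finally:
            await lease.release()

asyncio.run(main())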
""" - lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + lease = BlobLeaseClient(self, lease_id=lease_id) await lease.acquire(lease_duration=lease_duration, **kwargs) return lease @distributed_trace_async - async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): - # type: (Union[str, StandardBlobTier], Any) -> None + async def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: """This operation sets the tier on a block blob. A block blob's tier determines Hot/Cool/Archive storage type. @@ -1293,7 +1802,11 @@ async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -1302,6 +1815,7 @@ async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) if standard_blob_tier is None: raise ValueError("A StandardBlobTier must be specified") try: @@ -1310,24 +1824,25 @@ async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, lease_access_conditions=access_conditions, + version_id=version_id, **kwargs) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async async def stage_block( - self, block_id, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> None + self, block_id: str, + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob. :param str block_id: A string value that identifies the block. The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage @@ -1358,29 +1873,37 @@ async def stage_block( .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: Blob property dict. 
+ :rtype: Dict[str, Any] """ - options = self._stage_block_options( - block_id, - data, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _stage_block_options( + block_id=block_id, + data=data, length=length, **kwargs) try: - return await self._client.block_blob.stage_block(**options) + return cast(Dict[str, Any], await self._client.block_blob.stage_block(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async async def stage_block_from_url( - self, block_id, # type: str - source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - source_content_md5=None, # type: Optional[Union[bytes, bytearray]] - **kwargs - ): - # type: (...) -> None + self, block_id: str, + source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + source_content_md5: Optional[Union[bytes, bytearray]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob where the contents are read from a URL. @@ -1413,24 +1936,36 @@ async def stage_block_from_url( .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Blob property dict. + :rtype: Dict[str, Any] """ - options = self._stage_block_from_url_options( - block_id, - source_url=self._encode_source_url(source_url), + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _stage_block_from_url_options( + block_id=block_id, + source_url=source_url, source_offset=source_offset, source_length=source_length, source_content_md5=source_content_md5, **kwargs) try: - return await self._client.block_blob.stage_block_from_url(**options) + return cast(Dict[str, Any], await self._client.block_blob.stage_block_from_url(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def get_block_list(self, block_list_type="committed", **kwargs): - # type: (Optional[str], Any) -> Tuple[List[BlobBlock], List[BlobBlock]] + async def get_block_list( + self, block_list_type: str = "committed", + **kwargs: Any + ) -> Tuple[List[BlobBlock], List[BlobBlock]]: """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. @@ -1449,9 +1984,13 @@ async def get_block_list(self, block_list_type="committed", **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. 
To configure client-side network timesouts + see `here `__. :returns: A tuple of two lists - committed and uncommitted blocks - :rtype: tuple(list(~azure.storage.blob.BlobBlock), list(~azure.storage.blob.BlobBlock)) + :rtype: Tuple[List[BlobBlock], List[BlobBlock]] """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -1465,16 +2004,15 @@ async def get_block_list(self, block_list_type="committed", **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - return self._get_block_list_result(blocks) + return _get_block_list_result(blocks) @distributed_trace_async - async def commit_block_list( # type: ignore - self, block_list, # type: List[BlobBlock] - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + async def commit_block_list( + self, block_list: List[BlobBlock], + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. @@ -1491,7 +2029,7 @@ async def commit_block_list( # type: ignore The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -1500,6 +2038,18 @@ async def commit_block_list( # type: ignore Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword bool validate_content: If true, calculates an MD5 hash of the page content. The storage service checks the hash of the content that has arrived @@ -1547,23 +2097,30 @@ async def commit_block_list( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
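Continuing the staging sketch above, the blocks can then be committed; this is illustrative only and imports ``BlobBlock`` from the public package rather than the vendored path::

    from azure.storage.blob import BlobBlock

    # Sketch only: commit the staged blocks in the desired order.
    resp = await blob.commit_block_list([BlobBlock(block_id=i) for i in ids])
    print(resp["etag"], resp["last_modified"])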
:rtype: dict(str, Any) """ - options = self._commit_block_list_options( - block_list, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _commit_block_list_options( + block_list=block_list, content_settings=content_settings, metadata=metadata, **kwargs) try: - return await self._client.block_blob.commit_block_list(**options) # type: ignore + return cast(Dict[str, Any], await self._client.block_blob.commit_block_list(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): - # type: (Union[str, PremiumPageBlobTier], **Any) -> None + async def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None: """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. :param premium_page_blob_tier: @@ -1578,9 +2135,11 @@ async def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -1602,8 +2161,7 @@ async def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): process_storage_error(error) @distributed_trace_async - async def set_blob_tags(self, tags=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + async def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. Each call to this operation replaces all existing tags attached to the blob. To remove all tags from the blob, call this operation with no tags set. @@ -1616,7 +2174,7 @@ async def set_blob_tags(self, tags=None, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) :type tags: dict(str, str) :keyword str version_id: The version id parameter is an opaque DateTime @@ -1636,19 +2194,23 @@ async def set_blob_tags(self, tags=None, **kwargs): or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: Blob-updated property dict (Etag and last modified)
        :rtype: Dict[str, Any]
        """
-        options = self._set_blob_tags_options(tags=tags, **kwargs)
+        version_id = get_version_id(self.version_id, kwargs)
+        options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs)
        try:
-            return await self._client.blob.set_tags(**options)
+            return cast(Dict[str, Any], await self._client.blob.set_tags(**options))
        except HttpResponseError as error:
            process_storage_error(error)

    @distributed_trace_async
-    async def get_blob_tags(self, **kwargs):
-        # type: (**Any) -> Dict[str, str]
+    async def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]:
        """The Get Tags operation enables users to get tags on a blob or specific blob version, but not snapshot.

        .. versionadded:: 12.4.0

@@ -1665,26 +2227,30 @@ async def get_blob_tags(self, **kwargs):
            or the lease ID as a string.
        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: Key value pairs of blob tags.
        :rtype: Dict[str, str]
        """
-        options = self._get_blob_tags_options(**kwargs)
+        version_id = get_version_id(self.version_id, kwargs)
+        options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs)
        try:
            _, tags = await self._client.blob.get_tags(**options)
-            return parse_tags(tags)  # pylint: disable=protected-access
+            return cast(Dict[str, str], parse_tags(tags))
        except HttpResponseError as error:
            process_storage_error(error)

    @distributed_trace_async
-    async def get_page_ranges(  # type: ignore
-        self, offset=None,  # type: Optional[int]
-        length=None,  # type: Optional[int]
-        previous_snapshot_diff=None,  # type: Optional[Union[str, Dict[str, Any]]]
-        **kwargs
-    ):
-        # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
-        """Returns the list of valid page ranges for a Page Blob or snapshot
+    async def get_page_ranges(
+        self, offset: Optional[int] = None,
+        length: Optional[int] = None,
+        previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None,
+        **kwargs: Any
+    ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
+        """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot
        of a page blob.

        :param int offset:
@@ -1733,13 +2299,23 @@ async def get_page_ranges( # type: ignore

            .. versionadded:: 12.4.0

        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns:
            A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
            The first element is filled page ranges, the second element is cleared page ranges.
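A hedged round-trip sketch for the tag operations above (values illustrative)::

    # Sketch only: Set Tags replaces the blob's entire tag set.
    await blob.set_blob_tags({"project": "demo", "stage": "test"})
    tags = await blob.get_blob_tags()
    assert tags.get("project") == "demo"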
:rtype: tuple(list(dict(str, str), list(dict(str, str)) """ - options = self._get_page_ranges_options( + warnings.warn( + "get_page_ranges is deprecated, use list_page_ranges instead", + DeprecationWarning + ) + + options = _get_page_ranges_options( + snapshot=self.snapshot, offset=offset, length=length, previous_snapshot_diff=previous_snapshot_diff, @@ -1753,14 +2329,104 @@ async def get_page_ranges( # type: ignore process_storage_error(error) return get_page_ranges_result(ranges) + @distributed_trace + def list_page_ranges( + self, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> AsyncItemPaged[PageRange]: + """Returns the list of valid page ranges for a Page Blob or snapshot + of a page blob. If `previous_snapshot` is specified, the result will be + a diff of changes between the target blob and the previous snapshot. + + :keyword int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword previous_snapshot: + A snapshot value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot` + is the older of the two. + :paramtype previous_snapshot: str or Dict[str, Any] + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int results_per_page: + The maximum number of page ranges to retrieve per API call. 
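Assuming ``blob`` refers to a page blob, the new pager might be consumed as follows (hedged sketch)::

    # Sketch only: list_page_ranges pages lazily over PageRange items.
    async for page_range in blob.list_page_ranges(results_per_page=100):
        print(page_range.start, page_range.end, page_range.cleared)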
+        :keyword int timeout:
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
+        :returns: An iterable (auto-paging) of PageRange.
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.PageRange]
+        """
+        results_per_page = kwargs.pop('results_per_page', None)
+        options = _get_page_ranges_options(
+            snapshot=self.snapshot,
+            offset=offset,
+            length=length,
+            previous_snapshot_diff=previous_snapshot,
+            **kwargs)
+
+        if previous_snapshot:
+            command = partial(
+                self._client.page_blob.get_page_ranges_diff,
+                **options)
+        else:
+            command = partial(
+                self._client.page_blob.get_page_ranges,
+                **options)
+        return AsyncItemPaged(
+            command, results_per_page=results_per_page,
+            page_iterator_class=PageRangePaged)
+
    @distributed_trace_async
    async def get_page_range_diff_for_managed_disk(
-        self, previous_snapshot_url,  # type: str
-        offset=None,  # type: Optional[int]
-        length=None,  # type: Optional[int]
-        **kwargs
-    ):
-        # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
+        self, previous_snapshot_url: str,
+        offset: Optional[int] = None,
+        length: Optional[int] = None,
+        **kwargs: Any
+    ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
        """Returns the list of valid page ranges for a managed disk or snapshot.

        .. note::
@@ -1769,7 +2435,7 @@ async def get_page_range_diff_for_managed_disk(
        .. versionadded:: 12.2.0
            This operation was introduced in API version '2019-07-07'.

-        :param previous_snapshot_url:
+        :param str previous_snapshot_url:
            Specifies the URL of a previous snapshot of the managed disk.
            The response will only contain pages that were changed between the target blob and
            its previous snapshot.
@@ -1809,13 +2475,18 @@ async def get_page_range_diff_for_managed_disk(
        :keyword ~azure.core.MatchConditions match_condition:
            The match condition to use upon the etag.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns:
            A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
            The first element is filled page ranges, the second element is cleared page ranges.
        :rtype: tuple(list(dict(str, str)), list(dict(str, str)))
        """
-        options = self._get_page_ranges_options(
+        options = _get_page_ranges_options(
+            snapshot=self.snapshot,
            offset=offset,
            length=length,
            prev_snapshot_url=previous_snapshot_url,
            **kwargs)
@@ -1827,12 +2498,11 @@ async def get_page_range_diff_for_managed_disk(
        return get_page_ranges_result(ranges)

    @distributed_trace_async
-    async def set_sequence_number(  # type: ignore
-        self, sequence_number_action,  # type: Union[str, SequenceNumberAction]
-        sequence_number=None,  # type: Optional[str]
-        **kwargs
-    ):
-        # type: (...) -> Dict[str, Union[str, datetime]]
+    async def set_sequence_number(
+        self, sequence_number_action: Union[str, "SequenceNumberAction"],
+        sequence_number: Optional[str] = None,
+        **kwargs: Any
+    ) -> Dict[str, Union[str, datetime]]:
        """Sets the blob sequence number.

        :param str sequence_number_action:
@@ -1870,20 +2540,22 @@ async def set_sequence_number( # type: ignore

            ..
versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._set_sequence_number_options( - sequence_number_action, sequence_number=sequence_number, **kwargs) + options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs) try: - return await self._client.page_blob.update_sequence_number(**options) # type: ignore + return cast(Dict[str, Any], await self._client.page_blob.update_sequence_number(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def resize_blob(self, size, **kwargs): - # type: (int, Any) -> Dict[str, Union[str, datetime]] + async def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: """Resizes a page blob to the specified size. If the specified value is less than the current size of the blob, @@ -1924,24 +2596,29 @@ async def resize_blob(self, size, **kwargs): blob and number of allowed IOPS. This is only applicable to page blobs on premium storage accounts. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._resize_blob_options(size, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _resize_blob_options(size=size, **kwargs) try: - return await self._client.page_blob.resize(**options) # type: ignore + return cast(Dict[str, Any], await self._client.page_blob.resize(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def upload_page( # type: ignore - self, page, # type: bytes - offset, # type: int - length, # type: int - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + async def upload_page( + self, page: bytes, + offset: int, + length: int, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """The Upload Pages operation writes a range of pages to a page blob. :param bytes page: @@ -2015,28 +2692,36 @@ async def upload_page( # type: ignore :keyword str encoding: Defaults to UTF-8. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
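A hedged page blob maintenance sketch combining the two operations above (sizes and numbers arbitrary)::

    # Sketch only: grow the page blob to 1 MiB, then pin its sequence number.
    await blob.resize_blob(1024 * 1024)
    await blob.set_sequence_number("update", sequence_number="7")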
:rtype: dict(str, Any) """ - options = self._upload_page_options( + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_page_options( page=page, offset=offset, length=length, **kwargs) try: - return await self._client.page_blob.upload_pages(**options) # type: ignore + return cast(Dict[str, Any], await self._client.page_blob.upload_pages(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def upload_pages_from_url(self, source_url, # type: str - offset, # type: int - length, # type: int - source_offset, # type: int - **kwargs - ): - # type: (...) -> Dict[str, Any] + async def upload_pages_from_url( + self, source_url: str, + offset: int, + length: int, + source_offset: int, + **kwargs: Any + ) -> Dict[str, Any]: """ The Upload Pages operation writes a range of pages to a page blob where the contents are read from a URL. @@ -2126,24 +2811,36 @@ async def upload_pages_from_url(self, source_url, # type: str .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Response after uploading pages from specified URL. + :rtype: Dict[str, Any] """ - options = self._upload_pages_from_url_options( - source_url=self._encode_source_url(source_url), + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_pages_from_url_options( + source_url=source_url, offset=offset, length=length, source_offset=source_offset, **kwargs ) try: - return await self._client.page_blob.upload_pages_from_url(**options) # type: ignore + return cast(Dict[str, Any], await self._client.page_blob.upload_pages_from_url(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def clear_page(self, offset, length, **kwargs): - # type: (int, int, Any) -> Dict[str, Union[str, datetime]] + async def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: """Clears a range of pages. :param int offset: @@ -2198,27 +2895,39 @@ async def clear_page(self, offset, length, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
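A hedged sketch of writing and then clearing a single page; offsets and lengths must be 512-byte aligned::

    # Sketch only: write one 512-byte page at offset 0, then clear it.
    await blob.upload_page(b"\x00" * 512, offset=0, length=512)
    await blob.clear_page(offset=0, length=512)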
:returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._clear_page_options(offset, length, **kwargs) + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _clear_page_options( + offset=offset, + length=length, + **kwargs + ) try: - return await self._client.page_blob.clear_pages(**options) # type: ignore + return cast(Dict[str, Any], await self._client.page_blob.clear_pages(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def append_block( # type: ignore - self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime, int]] + async def append_block( + self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, int]]: """Commits a new block of data to the end of the existing append blob. :param data: Content of the block. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block in bytes. :keyword bool validate_content: @@ -2283,26 +2992,35 @@ async def append_block( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). :rtype: dict(str, Any) """ - options = self._append_block_options( - data, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _append_block_options( + data=data, length=length, **kwargs ) try: - return await self._client.append_blob.append_block(**options) # type: ignore + return cast(Dict[str, Any], await self._client.append_blob.append_block(**options)) except HttpResponseError as error: process_storage_error(error) - @distributed_trace_async() - async def append_block_from_url(self, copy_source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - **kwargs): - # type: (...) -> Dict[str, Union[str, datetime, int]] + @distributed_trace_async + async def append_block_from_url( + self, copy_source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime, int]]: """ Creates a new block to be committed as part of a blob, where the contents are read from a source url. @@ -2386,22 +3104,35 @@ async def append_block_from_url(self, copy_source_url, # type: str .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. 
For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Result after appending a new block. + :rtype: Dict[str, Union[str, datetime, int]] """ - options = self._append_block_from_url_options( - copy_source_url=self._encode_source_url(copy_source_url), + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _append_block_from_url_options( + copy_source_url=copy_source_url, source_offset=source_offset, source_length=source_length, **kwargs ) try: - return await self._client.append_blob.append_block_from_url(**options) # type: ignore + return cast(Dict[str, Union[str, datetime, int]], + await self._client.append_blob.append_block_from_url(**options)) except HttpResponseError as error: process_storage_error(error) - @distributed_trace_async() - async def seal_append_blob(self, **kwargs): - # type: (...) -> Dict[str, Union[str, datetime, int]] + @distributed_trace_async + async def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]: """The Seal operation seals the Append Blob to make it read-only. .. versionadded:: 12.4.0 @@ -2434,12 +3165,51 @@ async def seal_append_blob(self, **kwargs): :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). :rtype: dict(str, Any) """ - options = self._seal_append_blob_options(**kwargs) + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + options = _seal_append_blob_options(**kwargs) try: - return await self._client.append_blob.seal(**options) # type: ignore + return cast(Dict[str, Any], await self._client.append_blob.seal(**options)) except HttpResponseError as error: process_storage_error(error) + + def _get_container_client(self) -> "ContainerClient": + """Get a client to interact with the blob's parent container. + + The container need not already exist. Defaults to current blob's credentials. + + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START get_container_client_from_blob_client] + :end-before: [END get_container_client_from_blob_client] + :language: python + :dedent: 12 + :caption: Get container client from blob object. 
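A hedged append blob sketch, assuming the blob was created with ``create_append_blob`` (contents illustrative)::

    # Sketch only: append two blocks, then seal the blob read-only.
    await blob.append_block(b"first entry\n")
    await blob.append_block(b"second entry\n")
    await blob.seal_append_blob()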
+ """ + from ._container_client_async import ContainerClient + if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=cast(Iterable["AsyncHTTPPolicy"], + self._pipeline._impl_policies) # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline + return ContainerClient( + f"{self.scheme}://{self.primary_hostname}", container_name=self.container_name, + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_service_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_service_client_async.py index 4e91743c38be..8f76aa98c8cf 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_service_client_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_blob_service_client_async.py @@ -3,51 +3,66 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method +# pylint: disable=docstring-keyword-should-match-keyword-only + import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, +import warnings +from typing import ( + Any, cast, Dict, Iterable, List, Optional, Union, TYPE_CHECKING ) +from typing_extensions import Self +from azure.core.async_paging import AsyncItemPaged from azure.core.exceptions import HttpResponseError -from azure.core.tracing.decorator import distributed_trace from azure.core.pipeline import AsyncPipeline +from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.async_paging import AsyncItemPaged -from .._shared.models import LocationMode -from .._shared.policies_async import ExponentialRetry -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper -from .._shared.response_handlers import return_response_headers, process_storage_error -from .._shared.parser import _to_utc_datetime -from .._shared.response_handlers import parse_to_internal_user_delegation_key +from ._blob_client_async import BlobClient +from ._container_client_async import ContainerClient +from ._models import ContainerPropertiesPaged, FilteredBlobPaged +from .._blob_service_client_helpers import _parse_url +from .._deserialize import service_properties_deserialize, service_stats_deserialize +from .._encryption import StorageEncryptionMixin from .._generated.aio import AzureBlobStorage from .._generated.models import StorageServiceProperties, KeyInfo -from .._blob_service_client import BlobServiceClient as BlobServiceClientBase -from ._container_client_async 
import ContainerClient -from ._blob_client_async import BlobClient -from .._models import ContainerProperties -from .._deserialize import service_stats_deserialize, service_properties_deserialize +from .._models import BlobProperties, ContainerProperties, CorsRule from .._serialize import get_api_version -from ._models import ContainerPropertiesPaged, FilteredBlobPaged +from .._shared.base_client import parse_query, StorageAccountHostsMixin +from .._shared.base_client_async import parse_connection_str +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper +from .._shared.response_handlers import ( + parse_to_internal_user_delegation_key, + process_storage_error, + return_response_headers, +) +from .._shared.models import LocationMode +from .._shared.parser import _to_utc_datetime +from .._shared.policies_async import ExponentialRetry if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.policies import AsyncHTTPPolicy from datetime import datetime - from .._shared.models import AccountSasPermissions, ResourceTypes, UserDelegationKey from ._lease_async import BlobLeaseClient from .._models import ( - BlobProperties, - PublicAccess, BlobAnalyticsLogging, + FilteredBlob, Metrics, - CorsRule, + PublicAccess, RetentionPolicy, - StaticWebsite, + StaticWebsite ) + from .._shared.models import UserDelegationKey -class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase): +class BlobServiceClient( # type: ignore [misc] + AsyncStorageAccountHostsMixin, + StorageAccountHostsMixin, + StorageEncryptionMixin +): """A client to interact with the Blob Service at the account level. This client provides operations to retrieve and configure the account properties @@ -62,13 +77,15 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase): :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -87,6 +104,9 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase): the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. 
Defaults to 4*1024*1024, or 4MB. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -106,27 +126,80 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase): """ def __init__( - self, account_url, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None + self, account_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - super(BlobServiceClient, self).__init__( - account_url, - credential=credential, - **kwargs) - self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - self._loop = kwargs.get('loop', None) + parsed_url, sas_token = _parse_url(account_url=account_url) + _, sas_token = parse_query(parsed_url.query) + self._query_str, credential = self._format_query_string(sas_token, credential) + super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + + :param str hostname: + The hostname of the current location mode. + :returns: A formatted endpoint URL including current location mode hostname. + :rtype: str + """ + return f"{self.scheme}://{hostname}/{self._query_str}" + + @classmethod + def from_connection_string( + cls, conn_str: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create BlobServiceClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials_async.AsyncTokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. 
Only has an effect when credential is of type TokenCredential. The value could be
+            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
+        :returns: A Blob service client.
+        :rtype: ~azure.storage.blob.BlobServiceClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_authentication.py
+                :start-after: [START auth_from_connection_string]
+                :end-before: [END auth_from_connection_string]
+                :language: python
+                :dedent: 8
+                :caption: Creating the BlobServiceClient from a connection string.
+        """
+        account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob')
+        if 'secondary_hostname' not in kwargs:
+            kwargs['secondary_hostname'] = secondary
+        return cls(account_url, credential=credential, **kwargs)

    @distributed_trace_async
-    async def get_user_delegation_key(self, key_start_time,  # type: datetime
-                                      key_expiry_time,  # type: datetime
-                                      **kwargs  # type: Any
-                                      ):
-        # type: (...) -> UserDelegationKey
+    async def get_user_delegation_key(
+        self, key_start_time: "datetime",
+        key_expiry_time: "datetime",
+        **kwargs: Any
+    ) -> "UserDelegationKey":
        """
        Obtain a user delegation key for the purpose of signing SAS tokens.
        A token credential must be present on the service object for this request to succeed.

        :param ~datetime.datetime key_start_time:
            A DateTime value. Indicates when the key becomes valid.
        :param ~datetime.datetime key_expiry_time:
            A DateTime value. Indicates when the key stops being valid.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :return: The user delegation key.
        :rtype: ~azure.storage.blob.UserDelegationKey
        """
@@ -152,8 +229,7 @@ async def get_user_delegation_key(self, key_start_time, # type: datetime
        return parse_to_internal_user_delegation_key(user_delegation_key)  # type: ignore

    @distributed_trace_async
-    async def get_account_information(self, **kwargs):
-        # type: (Any) -> Dict[str, str]
+    async def get_account_information(self, **kwargs: Any) -> Dict[str, str]:
        """Gets information related to the storage account.

        The information can also be retrieved if the user has a SAS to a container or blob.
@@ -177,8 +253,7 @@ async def get_account_information(self, **kwargs):
            process_storage_error(error)

    @distributed_trace_async
-    async def get_service_stats(self, **kwargs):
-        # type: (Any) -> Dict[str, Any]
+    async def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]:
        """Retrieves statistics related to replication for the Blob service.

        It is only available when read-access geo-redundant replication is enabled for
@@ -198,7 +273,11 @@ async def get_service_stats(self, **kwargs):
            replication is enabled for your storage account.

        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :return: The blob service stats.
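A hedged service-level sketch; ``conn_str`` is a placeholder and the import uses the public package path rather than this vendored copy. ``get_service_stats`` requires read-access geo-redundant replication::

    from azure.storage.blob.aio import BlobServiceClient

    # Sketch only: conn_str stands in for a real connection string.
    async with BlobServiceClient.from_connection_string(conn_str) as service:
        stats = await service.get_service_stats()
        print(stats["geo_replication"]["status"])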
        :rtype: Dict[str, Any]
@@ -220,13 +299,16 @@
            process_storage_error(error)

    @distributed_trace_async
-    async def get_service_properties(self, **kwargs):
-        # type: (Any) -> Dict[str, Any]
+    async def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]:
        """Gets the properties of a storage account's Blob service, including
        Azure Storage Analytics.

        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: An object containing blob service properties such as
            analytics logging, hour/minute metrics, cors rules, etc.
        :rtype: Dict[str, Any]
@@ -249,16 +331,15 @@

    @distributed_trace_async
    async def set_service_properties(
-        self, analytics_logging=None,  # type: Optional[BlobAnalyticsLogging]
-        hour_metrics=None,  # type: Optional[Metrics]
-        minute_metrics=None,  # type: Optional[Metrics]
-        cors=None,  # type: Optional[List[CorsRule]]
-        target_version=None,  # type: Optional[str]
-        delete_retention_policy=None,  # type: Optional[RetentionPolicy]
-        static_website=None,  # type: Optional[StaticWebsite]
-        **kwargs
-    ):
-        # type: (...) -> None
+        self, analytics_logging: Optional["BlobAnalyticsLogging"] = None,
+        hour_metrics: Optional["Metrics"] = None,
+        minute_metrics: Optional["Metrics"] = None,
+        cors: Optional[List[CorsRule]] = None,
+        target_version: Optional[str] = None,
+        delete_retention_policy: Optional["RetentionPolicy"] = None,
+        static_website: Optional["StaticWebsite"] = None,
+        **kwargs: Any
+    ) -> None:
        """Sets the properties of a storage account's Blob service, including
        Azure Storage Analytics.
@@ -293,7 +374,11 @@ async def set_service_properties(
            and if yes, indicates the index document and 404 error document to use.
        :type static_website: ~azure.storage.blob.StaticWebsite
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :rtype: None

        .. admonition:: Example:
@@ -314,7 +399,7 @@
            logging=analytics_logging,
            hour_metrics=hour_metrics,
            minute_metrics=minute_metrics,
-            cors=cors,
+            cors=CorsRule._to_generated(cors),  # pylint: disable=protected-access
            default_service_version=target_version,
            delete_retention_policy=delete_retention_policy,
            static_website=static_website
@@ -327,11 +412,10 @@

    @distributed_trace
    def list_containers(
-        self, name_starts_with=None,  # type: Optional[str]
-        include_metadata=False,  # type: Optional[bool]
-        **kwargs
-    ):
-        # type: (...) -> AsyncItemPaged[ContainerProperties]
+        self, name_starts_with: Optional[str] = None,
+        include_metadata: bool = False,
+        **kwargs: Any
+    ) -> AsyncItemPaged[ContainerProperties]:
        """Returns a generator to list the containers under the specified account.

        The generator will lazily follow the continuation tokens returned by
@@ -347,11 +431,18 @@
            Specifies that deleted containers be returned in the response.
            This is for accounts with container restore enabled. The default value is `False`.

            .. versionadded:: 12.4.0
+        :keyword bool include_system:
+            Flag specifying that system containers should be included.
+
+            .. versionadded:: 12.10.0
        :keyword int results_per_page:
            The maximum number of container names to retrieve per API
            call. If the request does not specify the server will return up to 5,000 items.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: An iterable (auto-paging) of ContainerProperties.
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.ContainerProperties]
@@ -368,6 +459,9 @@
        include_deleted = kwargs.pop('include_deleted', None)
        if include_deleted:
            include.append("deleted")
+        include_system = kwargs.pop('include_system', None)
+        if include_system:
+            include.append("system")
        timeout = kwargs.pop('timeout', None)
        results_per_page = kwargs.pop('results_per_page', None)
        command = functools.partial(
@@ -384,8 +478,7 @@
        )

    @distributed_trace
-    def find_blobs_by_tags(self, filter_expression, **kwargs):
-        # type: (str, **Any) -> AsyncItemPaged[FilteredBlob]
+    def find_blobs_by_tags(self, filter_expression: str, **kwargs: Any) -> AsyncItemPaged["FilteredBlob"]:
        """The Filter Blobs operation enables callers to list blobs across all
        containers whose tags match a given search expression. Filter blobs
        searches across all containers within a storage account but can be
@@ -398,7 +491,11 @@
        :keyword int results_per_page:
            The max result per page when paginating.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client.
To configure client-side network timesouts + see `here `__. + :returns: A container client to interact with the newly created container. :rtype: ~azure.storage.blob.aio.ContainerClient .. admonition:: Example: @@ -465,11 +566,10 @@ async def create_container( @distributed_trace_async async def delete_container( - self, container, # type: Union[ContainerProperties, str] - lease=None, # type: Optional[Union[BlobLeaseClient, str]] - **kwargs - ): - # type: (...) -> None + self, container: Union[ContainerProperties, str], + lease: Optional[Union["BlobLeaseClient", str]] = None, + **kwargs: Any + ) -> None: """Marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. @@ -483,7 +583,7 @@ async def delete_container( If specified, delete_container only succeeds if the container's lease is active and matches this ID. Required if the container has an active lease. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :type lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -502,7 +602,11 @@ async def delete_container( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None .. admonition:: Example: @@ -514,17 +618,55 @@ async def delete_container( :dedent: 16 :caption: Deleting a container in the blob service. """ - container = self.get_container_client(container) # type: ignore + container_client = self.get_container_client(container) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) - await container.delete_container( # type: ignore + await container_client.delete_container( lease=lease, timeout=timeout, **kwargs) @distributed_trace_async - async def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs): - # type: (str, str, str, **Any) -> ContainerClient + async def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> ContainerClient: + """Renames a container. + + Operation is successful only if the source container exists. + + :param str name: + The name of the container to rename. + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: A container client for the renamed container. 
+ :rtype: ~azure.storage.blob.ContainerClient
+ """
+ renamed_container = self.get_container_client(new_name)
+ lease = kwargs.pop('lease', None)
+ try:
+ kwargs['source_lease_id'] = lease.id
+ except AttributeError:
+ kwargs['source_lease_id'] = lease
+ try:
+ await renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access
+ return renamed_container
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace_async
+ async def undelete_container(
+ self, deleted_container_name: str,
+ deleted_container_version: str,
+ **kwargs: Any
+ ) -> ContainerClient:
"""Restores soft-deleted container.

Operation will only be successful if used within the specified number of days
@@ -537,14 +679,18 @@ async def undelete_container(self, deleted_container_name, deleted_container_ver
Specifies the name of the deleted container to restore.
:param str deleted_container_version:
Specifies the version of the deleted container to restore.
- :keyword str new_name:
- The new name for the deleted container to be restored to.
- If not specified deleted_container_name will be used as the restored container name.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
+ :returns: The recovered soft-deleted ContainerClient.
:rtype: ~azure.storage.blob.aio.ContainerClient
"""
new_name = kwargs.pop('new_name', None)
+ if new_name:
+ warnings.warn("`new_name` is no longer supported.", DeprecationWarning)
container = self.get_container_client(new_name or deleted_container_name)
try:
await container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access
@@ -554,8 +700,7 @@ async def undelete_container(self, deleted_container_name, deleted_container_ver
except HttpResponseError as error:
process_storage_error(error)

- def get_container_client(self, container):
- # type: (Union[ContainerProperties, str]) -> ContainerClient
+ def get_container_client(self, container: Union[ContainerProperties, str]) -> ContainerClient:
"""Get a client to interact with the specified container.

The container need not already exist.

@@ -576,27 +721,28 @@ def get_container_client(self, container):
:dedent: 12
:caption: Getting the container client to interact with a specific container.
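A minimal sketch of the same pattern (an editorial illustration, not part of this patch; it assumes an existing ``BlobServiceClient`` named ``service`` and an illustrative container name)::

    # No network call happens here; the container need not exist yet.
    container_client = service.get_container_client("my-container")
    print(container_client.container_name)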
""" - try: + if isinstance(container, ContainerProperties): container_name = container.name - except AttributeError: + else: container_name = container _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access ) return ContainerClient( self.url, container_name=container_name, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function, loop=self._loop) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) def get_blob_client( - self, container, # type: Union[ContainerProperties, str] - blob, # type: Union[BlobProperties, str] - snapshot=None # type: Optional[Union[Dict[str, Any], str]] - ): - # type: (...) -> BlobClient + self, container: Union[ContainerProperties, str], + blob: str, + snapshot: Optional[Union[Dict[str, Any], str]] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. @@ -605,15 +751,15 @@ def get_blob_client( The container that the blob is in. This can either be the name of the container, or an instance of ContainerProperties. :type container: str or ~azure.storage.blob.ContainerProperties - :param blob: - The blob with which to interact. This can either be the name of the blob, - or an instance of BlobProperties. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: + The blob with which to interact. :param snapshot: The optional blob snapshot on which to operate. This can either be the ID of the snapshot, or a dictionary output returned by :func:`~azure.storage.blob.aio.BlobClient.create_snapshot()`. :type snapshot: str or dict(str, Any) + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. :returns: A BlobClient. :rtype: ~azure.storage.blob.aio.BlobClient @@ -626,22 +772,28 @@ def get_blob_client( :dedent: 16 :caption: Getting the blob client to interact with a specific blob. """ - try: - container_name = container.name - except AttributeError: - container_name = container - - try: + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. 
" + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_name = blob.name - except AttributeError: + else: blob_name = blob + if isinstance(container, ContainerProperties): + container_name = container.name + else: + container_name = container _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + policies=cast(Iterable["AsyncHTTPPolicy"], + self._pipeline._impl_policies) # pylint: disable = protected-access ) - return BlobClient( # type: ignore + return BlobClient( self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function, loop=self._loop) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_container_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_container_client_async.py index e26fe2388539..306e3acf5519 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_container_client_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_container_client_async.py @@ -1,51 +1,68 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only + import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, AnyStr, Dict, List, IO, AsyncIterator, +import warnings +from datetime import datetime +from typing import ( + Any, AnyStr, AsyncIterable, AsyncIterator, cast, Dict, List, IO, Iterable, Optional, overload, Union, TYPE_CHECKING ) +from urllib.parse import unquote, urlparse +from typing_extensions import Self -from azure.core.exceptions import HttpResponseError +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError +from azure.core.pipeline import AsyncPipeline +from azure.core.pipeline.transport import AsyncHttpResponse # pylint: disable=C4756 from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.async_paging import AsyncItemPaged -from azure.core.pipeline import AsyncPipeline -from azure.core.pipeline.transport import AsyncHttpResponse -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper +from ._blob_client_async import BlobClient +from ._download_async import StorageStreamDownloader +from ._lease_async import BlobLeaseClient +from ._list_blobs_helper import BlobNamesPaged, BlobPropertiesPaged, BlobPrefix +from ._models import FilteredBlobPaged +from .._container_client_helpers import ( + _format_url, + _generate_delete_blobs_options, + _generate_set_tiers_options, + _parse_url +) +from .._deserialize import deserialize_container_properties +from .._encryption import StorageEncryptionMixin +from .._generated.aio import AzureBlobStorage +from .._generated.models import SignedIdentifier +from .._list_blobs_helper import IgnoreListBlobsDeserializer +from .._models import ContainerProperties, BlobType, BlobProperties, FilteredBlob +from .._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions +from .._shared.base_client import StorageAccountHostsMixin +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str from .._shared.policies_async import ExponentialRetry from .._shared.request_handlers import add_metadata_headers, serialize_iso from .._shared.response_handlers import ( process_storage_error, - return_response_headers, - return_headers_and_deserialized) -from .._generated.aio import AzureBlobStorage -from .._generated.models import SignedIdentifier -from .._deserialize import deserialize_container_properties -from .._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions -from .._container_client import ContainerClient as ContainerClientBase, _get_blob_name -from .._models import ContainerProperties, BlobType, BlobProperties # pylint: disable=unused-import -from ._list_blobs_helper import BlobPropertiesPaged, BlobPrefix -from ._lease_async import BlobLeaseClient -from ._blob_client_async import BlobClient + return_headers_and_deserialized, + return_response_headers +) if TYPE_CHECKING: - from .._models import PublicAccess - from ._download_async import StorageStreamDownloader - from datetime import datetime - from .._models import ( # pylint: disable=unused-import + from azure.core.credentials import 
AzureNamedKeyCredential, AzureSasCredential + from azure.core.credentials_async import AsyncTokenCredential + from ._blob_service_client_async import BlobServiceClient + from .._models import ( AccessPolicy, StandardBlobTier, - PremiumPageBlobTier) + PremiumPageBlobTier, + PublicAccess + ) -class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): +class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin): # type: ignore [misc] # pylint: disable=too-many-public-methods """A client to interact with a specific container, although that container may not yet exist. @@ -61,13 +78,15 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -86,6 +105,9 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -104,26 +126,143 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): :caption: Creating the container client directly. """ def __init__( - self, account_url, # type: str - container_name, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) 
-> None + self, account_url: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - super(ContainerClient, self).__init__( - account_url, - container_name=container_name, - credential=credential, - **kwargs) - self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - self._loop = kwargs.get('loop', None) + parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name) + + self.container_name = container_name + # This parameter is used for the hierarchy traversal. Give precedence to credential. + self._raw_credential = credential if credential else sas_token + self._query_str, credential = self._format_query_string(sas_token, credential) + super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._api_version = get_api_version(kwargs) + self._client = self._build_generated_client() + self._configure_encryption(kwargs) + + def _build_generated_client(self) -> AzureBlobStorage: + client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access + return client + + def _format_url(self, hostname): + return _format_url( + container_name=self.container_name, + hostname=hostname, + scheme=self.scheme, + query_str=self._query_str + ) + + @classmethod + def from_container_url( + cls, container_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create ContainerClient from a container url. + + :param str container_url: + The full endpoint URL to the Container, including SAS token if used. This could be + either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. + :type container_url: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials_async.AsyncTokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. 
The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :returns: A container client. + :rtype: ~azure.storage.blob.ContainerClient + """ + try: + if not container_url.lower().startswith('http'): + container_url = "https://" + container_url + except AttributeError as exc: + raise ValueError("Container URL must be a string.") from exc + parsed_url = urlparse(container_url) + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {container_url}") + + container_path = parsed_url.path.strip('/').split('/') + account_path = "" + if len(container_path) > 1: + account_path = "/" + "/".join(container_path[:-1]) + account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" + container_name = unquote(container_path[-1]) + if not container_name: + raise ValueError("Invalid URL. Please provide a URL with a valid container name") + return cls(account_url, container_name=container_name, credential=credential, **kwargs) + + @classmethod + def from_connection_string( + cls, conn_str: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create ContainerClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param container_name: + The container name for the blob. + :type container_name: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials_async.AsyncTokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :returns: A container client. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string_container] + :end-before: [END auth_from_connection_string_container] + :language: python + :dedent: 8 + :caption: Creating the ContainerClient from a connection string. 
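A minimal sketch of the same call (an editorial illustration; the connection string values are placeholders, and the import assumes the public ``azure-storage-blob`` package rather than this vendored copy)::

    from azure.storage.blob.aio import ContainerClient

    conn_str = (
        "DefaultEndpointsProtocol=https;AccountName=myaccount;"
        "AccountKey=<key>;EndpointSuffix=core.windows.net"
    )
    container_client = ContainerClient.from_connection_string(
        conn_str, container_name="my-container")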
+ """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls( + account_url, container_name=container_name, credential=credential, **kwargs) @distributed_trace_async - async def create_container(self, metadata=None, public_access=None, **kwargs): - # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> None + async def create_container( + self, metadata: Optional[Dict[str, str]] = None, + public_access: Optional[Union["PublicAccess", str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """ Creates a new container under the specified account. If the container with the same name already exists, the operation fails. @@ -142,8 +281,13 @@ async def create_container(self, metadata=None, public_access=None, **kwargs): :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: A dictionary of response headers. + :rtype: Dict[str, Union[str, datetime]] .. admonition:: Example: @@ -170,9 +314,45 @@ async def create_container(self, metadata=None, public_access=None, **kwargs): process_storage_error(error) @distributed_trace_async - async def delete_container( - self, **kwargs): - # type: (Any) -> None + async def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": + """Renames a container. + + Operation is successful only if the source container exists. + + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: The renamed container. + :rtype: ~azure.storage.blob.ContainerClient + """ + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container = ContainerClient( + f"{self.scheme}://{self.primary_hostname}", container_name=new_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) + await renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def delete_container(self, **kwargs: Any) -> None: """ Marks the specified container for deletion. 
The container and any blobs contained within it are later deleted during garbage collection.
@@ -200,7 +380,11 @@ async def delete_container(
:keyword ~azure.core.MatchConditions match_condition:
The match condition to use upon the etag.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:rtype: None

.. admonition:: Example:

@@ -227,10 +411,10 @@ async def delete_container(

@distributed_trace_async
async def acquire_lease(
- self, lease_duration=-1, # type: int
- lease_id=None, # type: Optional[str]
- **kwargs):
- # type: (...) -> BlobLeaseClient
+ self, lease_duration: int = -1,
+ lease_id: Optional[str] = None,
+ **kwargs: Any
+ ) -> BlobLeaseClient:
"""
Requests a new lease. If the container does not have an active lease,
the Blob service creates a lease on the container and returns a new
@@ -262,7 +446,11 @@ async def acquire_lease(
:keyword ~azure.core.MatchConditions match_condition:
The match condition to use upon the etag.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: A BlobLeaseClient object that can be run in a context manager.
:rtype: ~azure.storage.blob.aio.BlobLeaseClient

@@ -282,8 +470,7 @@ async def acquire_lease(
return lease

@distributed_trace_async
- async def get_account_information(self, **kwargs):
- # type: (**Any) -> Dict[str, str]
+ async def get_account_information(self, **kwargs: Any) -> Dict[str, str]:
"""Gets information related to the storage account.

The information can also be retrieved if the user has a SAS to a container or blob.
@@ -298,8 +485,7 @@ async def get_account_information(self, **kwargs):
process_storage_error(error)

@distributed_trace_async
- async def get_container_properties(self, **kwargs):
- # type: (**Any) -> ContainerProperties
+ async def get_container_properties(self, **kwargs: Any) -> ContainerProperties:
"""Returns all user-defined metadata and system
properties for the specified container. The data returned does not include the container's
list of blobs.
@@ -308,7 +494,11 @@ async def get_container_properties(self, **kwargs):
container's lease is active and matches this ID.
:paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:return: Properties for the specified container within a container object.
:rtype: ~azure.storage.blob.ContainerProperties

@@ -336,11 +526,33 @@ async def get_container_properties(self, **kwargs):
return response # type: ignore

@distributed_trace_async
- async def set_container_metadata( # type: ignore
- self, metadata=None, # type: Optional[Dict[str, str]]
- **kwargs
- ):
- # type: (...)
-> Dict[str, Union[str, datetime]]
+ async def exists(self, **kwargs: Any) -> bool:
+ """
+ Returns True if a container exists and returns False otherwise.
+
+ :keyword int timeout:
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
+ :returns: True if the container exists, False otherwise.
+ :rtype: bool
+ """
+ try:
+ await self._client.container.get_properties(**kwargs)
+ return True
+ except HttpResponseError as error:
+ try:
+ process_storage_error(error)
+ except ResourceNotFoundError:
+ return False
+
+ @distributed_trace_async
+ async def set_container_metadata(
+ self, metadata: Optional[Dict[str, str]] = None,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime]]:
"""Sets one or more user-defined name-value pairs for the specified
container. Each call to this operation replaces all existing metadata
attached to the container. To remove all metadata from the container,
@@ -361,8 +573,13 @@ async def set_container_metadata( # type: ignore
Specify this header to perform the operation only if the resource
has been modified since the specified time.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Container-updated property dict (Etag and last modified).
+ :rtype: Dict[str, Union[str, datetime]]

.. admonition:: Example:

@@ -380,7 +597,7 @@ async def set_container_metadata( # type: ignore
mod_conditions = get_modify_conditions(kwargs)
timeout = kwargs.pop('timeout', None)
try:
- return await self._client.container.set_metadata( # type: ignore
+ return await self._client.container.set_metadata( # type: ignore
timeout=timeout,
lease_access_conditions=access_conditions,
modified_access_conditions=mod_conditions,
@@ -390,9 +607,42 @@ async def set_container_metadata( # type: ignore
except HttpResponseError as error:
process_storage_error(error)

+ @distributed_trace
+ def _get_blob_service_client(self) -> "BlobServiceClient":
+ """Get a client to interact with the container's parent service account.
+
+ Defaults to current container's credentials.
+
+ :returns: A BlobServiceClient.
+ :rtype: ~azure.storage.blob.BlobServiceClient
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_service_async.py
+ :start-after: [START get_blob_service_client_from_container_client]
+ :end-before: [END get_blob_service_client_from_container_client]
+ :language: python
+ :dedent: 8
+ :caption: Get blob service client from container object.
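A minimal sketch (an editorial illustration; ``container_client`` is assumed to exist, and the method is private in this vendored copy, so the call is shown for orientation only)::

    service = container_client._get_blob_service_client()
    # The returned service client reuses this container client's credential.
    account_info = await service.get_account_information()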
+ """ + from ._blob_service_client_async import BlobServiceClient + if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline + return BlobServiceClient( + f"{self.scheme}://{self.primary_hostname}", + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, + encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function, _pipeline=_pipeline) + + @distributed_trace_async - async def get_container_access_policy(self, **kwargs): - # type: (Any) -> Dict[str, Any] + async def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: """Gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. @@ -401,7 +651,11 @@ async def get_container_access_policy(self, **kwargs): container's lease is active and matches this ID. :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Access policy information in a dict. :rtype: dict[str, Any] @@ -432,10 +686,10 @@ async def get_container_access_policy(self, **kwargs): @distributed_trace_async async def set_container_access_policy( - self, signed_identifiers, # type: Dict[str, AccessPolicy] - public_access=None, # type: Optional[Union[str, PublicAccess]] - **kwargs # type: Any - ): # type: (...) -> Dict[str, Union[str, datetime]] + self, signed_identifiers: Dict[str, "AccessPolicy"], + public_access: Optional[Union[str, "PublicAccess"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets the permissions for the specified container or stored access policies that may be used with Shared Access Signatures. The permissions indicate whether blobs in a container may be accessed publicly. @@ -464,7 +718,11 @@ async def set_container_access_policy( Specify this header to perform the operation only if the resource has not been modified since the specified date/time. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Container-updated property dict (Etag and last modified). 
:rtype: dict[str, str or ~datetime.datetime] @@ -494,20 +752,23 @@ async def set_container_access_policy( mod_conditions = get_modify_conditions(kwargs) access_conditions = get_access_conditions(lease) try: - return await self._client.container.set_access_policy( + return cast(Dict[str, Union[str, datetime]], await self._client.container.set_access_policy( container_acl=signed_identifiers or None, timeout=timeout, access=public_access, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=return_response_headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def list_blobs(self, name_starts_with=None, include=None, **kwargs): - # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> AsyncItemPaged[BlobProperties] + def list_blobs( + self, name_starts_with: Optional[str] = None, + include: Optional[Union[str, List[str]]] = None, + **kwargs: Any + ) -> AsyncItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. @@ -515,11 +776,17 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :param str name_starts_with: Filters the results to return only blobs whose names begin with the specified prefix. - :param list[str] or str include: + :param include: Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'tags'. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :type include: list[str] or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties] @@ -532,6 +799,10 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :dedent: 12 :caption: List the blobs in the container. """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + if include and not isinstance(include, list): include = [include] @@ -546,17 +817,63 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): command, prefix=name_starts_with, results_per_page=results_per_page, + container=self.container_name, page_iterator_class=BlobPropertiesPaged ) + @distributed_trace + def list_blob_names(self, **kwargs: Any) -> AsyncItemPaged[str]: + """Returns a generator to list the names of blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. + + Note that no additional properties or metadata will be returned when using this API. + Additionally this API does not have an option to include additional blobs such as snapshots, + versions, soft-deleted blobs, etc. To get any of this data, use :func:`list_blobs()`. 
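A minimal sketch of the names-only listing described above (an editorial illustration; ``container_client`` is assumed to exist and the prefix is illustrative)::

    # AsyncItemPaged[str] is an async iterator of blob names.
    async for blob_name in container_client.list_blob_names(name_starts_with="logs/"):
        print(blob_name)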
+ + :keyword str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: An iterable (auto-paging) response of blob names as strings. + :rtype: ~azure.core.async_paging.AsyncItemPaged[str] + """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + + name_starts_with = kwargs.pop('name_starts_with', None) + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + + # For listing only names we need to create a one-off generated client and + # override its deserializer to prevent deserialization of the full response. + client = self._build_generated_client() + client.container._deserialize = IgnoreListBlobsDeserializer() # pylint: disable=protected-access + + command = functools.partial( + client.container.list_blob_flat_segment, + timeout=timeout, + **kwargs) + return AsyncItemPaged( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + container=self.container_name, + page_iterator_class=BlobNamesPaged) + @distributed_trace def walk_blobs( - self, name_starts_with=None, # type: Optional[str] - include=None, # type: Optional[Any] - delimiter="/", # type: str - **kwargs # type: Optional[Any] - ): - # type: (...) -> AsyncItemPaged[BlobProperties] + self, name_starts_with: Optional[str] = None, + include: Optional[Union[List[str], str]] = None, + delimiter: str = "/", + **kwargs: Any + ) -> AsyncItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. This operation will list blobs in accordance with a hierarchy, @@ -565,19 +882,29 @@ def walk_blobs( :param str name_starts_with: Filters the results to return only blobs whose names begin with the specified prefix. - :param list[str] include: + :param include: Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :type include: list[str] or str :param str delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose names begin with the same substring up to the appearance of the delimiter character. The delimiter may be a single character or a string. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. 
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties] """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + if include and not isinstance(include, list): include = [include] @@ -593,24 +920,59 @@ def walk_blobs( command, prefix=name_starts_with, results_per_page=results_per_page, + container=self.container_name, delimiter=delimiter) + @distributed_trace + def find_blobs_by_tags( + self, filter_expression: str, + **kwargs: Any + ) -> AsyncItemPaged[FilteredBlob]: + """Returns a generator to list the blobs under the specified container whose tags + match the given search expression. + The generator will lazily follow the continuation tokens returned by + the service. + + :param str filter_expression: + The expression to find blobs whose tags matches the specified condition. + eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" + :keyword int results_per_page: + The max result per page when paginating. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: An iterable (auto-paging) response of FilteredBlob. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] + """ + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.filter_blobs, + timeout=timeout, + where=filter_expression, + **kwargs) + return AsyncItemPaged( + command, results_per_page=results_per_page, + container=self.container_name, + page_iterator_class=FilteredBlobPaged) + @distributed_trace_async async def upload_blob( - self, name, # type: Union[str, BlobProperties] - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> BlobClient + self, name: str, + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]], + blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, + length: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs + ) -> BlobClient: """Creates a new blob from a data source with automatic chunking. - :param name: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type name: str or ~azure.storage.blob.BlobProperties + :param str name: The blob with which to interact. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -665,7 +1027,12 @@ async def upload_blob( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. 
To configure client-side network timesouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. multiple calls to the Azure service and the timeout will apply to each call individually. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: @@ -699,6 +1066,11 @@ async def upload_blob( :keyword str encoding: Defaults to UTF-8. + :keyword progress_hook: + An async callback to track the progress of a long running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] :returns: A BlobClient to interact with the newly uploaded blob. :rtype: ~azure.storage.blob.aio.BlobClient @@ -711,6 +1083,12 @@ async def upload_blob( :dedent: 12 :caption: Upload blob to the container. """ + if isinstance(name, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param name is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob = self.get_blob_client(name) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -728,11 +1106,10 @@ async def upload_blob( @distributed_trace_async async def delete_blob( - self, blob, # type: Union[str, BlobProperties] - delete_snapshots=None, # type: Optional[str] - **kwargs - ): - # type: (...) -> None + self, blob: str, + delete_snapshots: Optional[str] = None, + **kwargs: Any + ) -> None: """Marks the specified blob or snapshot for deletion. The blob is later deleted during garbage collection. @@ -743,12 +1120,10 @@ async def delete_blob( If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot and retains the blob or snapshot for specified number of days. After specified number of days, blob's data is removed from the service during garbage collection. - Soft deleted blob or snapshot is accessible through :func:`list_blobs()` specifying `include=["deleted"]` - option. Soft-deleted blob or snapshot can be restored using :func:`~BlobClient.undelete()` + Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` + Soft-deleted blob or snapshot can be restored using :func:`~azure.storage.blob.aio.BlobClient.undelete()` - :param blob: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The blob with which to interact. :param str delete_snapshots: Required if the blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. @@ -758,6 +1133,7 @@ async def delete_blob( value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -788,9 +1164,19 @@ async def delete_blob( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
:rtype: None """ + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -799,22 +1185,56 @@ async def delete_blob( timeout=timeout, **kwargs) + @overload + async def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + async def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... + @distributed_trace_async - async def download_blob(self, blob, offset=None, length=None, **kwargs): - # type: (Union[str, BlobProperties], Optional[int], Optional[int], Any) -> StorageStreamDownloader + async def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. The readall() method must be used to read all the content or readinto() must be used to download the blob into - a stream. + a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks. - :param blob: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The blob with which to interact. :param int offset: Start of byte range to use for downloading a section of the blob. Must be set if length is provided. :param int length: Number of bytes to read from the stream. This is optional, but should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + + This keyword argument was introduced in API version '2019-12-12'. + :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage service checks the hash of the content that has arrived with the hash @@ -861,24 +1281,41 @@ async def download_blob(self, blob, offset=None, length=None, **kwargs): The number of parallel connections with which to download. :keyword str encoding: Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + An async callback to track the progress of a long running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], Awaitable[None]] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
This method may make multiple calls to the service and + the timeout will apply to each call individually. multiple calls to the Azure service and the timeout will apply to each call individually. :returns: A streaming object. (StorageStreamDownloader) :rtype: ~azure.storage.blob.aio.StorageStreamDownloader """ + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) return await blob_client.download_blob( offset=offset, length=length, + encoding=encoding, **kwargs) @distributed_trace_async - async def delete_blobs( # pylint: disable=arguments-differ - self, *blobs: List[Union[str, BlobProperties, dict]], - **kwargs + async def delete_blobs( + self, *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any ) -> AsyncIterator[AsyncHttpResponse]: """Marks the specified blobs or snapshots for deletion. @@ -890,7 +1327,9 @@ async def delete_blobs( # pylint: disable=arguments-differ and retains the blobs or snapshots for specified number of days. After specified number of days, blobs' data is removed from the service during garbage collection. Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` - Soft-deleted blobs or snapshots can be restored using :func:`~BlobClient.undelete()` + Soft-deleted blobs or snapshots can be restored using :func:`~azure.storage.blob.aio.BlobClient.undelete()` + + The maximum number of blobs that can be deleted in a single request is 256. :param blobs: The blobs to delete. This can be a single blob, or multiple values can @@ -903,7 +1342,9 @@ async def delete_blobs( # pylint: disable=arguments-differ key: 'name', value type: str snapshot you want to delete: key: 'snapshot', value type: str - whether to delete snapthots when deleting blob: + version id: + key: 'version_id', value type: str + whether to delete snapshots when deleting blob: key: 'delete_snapshots', value: 'include' or 'only' if the blob modified or not: key: 'if_modified_since', 'if_unmodified_since', value type: datetime @@ -918,7 +1359,7 @@ async def delete_blobs( # pylint: disable=arguments-differ timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: Union[str, Dict[str, Any], BlobProperties] :keyword str delete_snapshots: Required if a blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. @@ -946,7 +1387,11 @@ async def delete_blobs( # pylint: disable=arguments-differ is raised even if there is a single operation failure. For optimal performance, this should be set to False :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] @@ -960,24 +1405,33 @@ async def delete_blobs( # pylint: disable=arguments-differ :caption: Deleting multiple blobs. 
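A minimal sketch of the same batch call (an editorial illustration; blob names and options are placeholders, and a single batch request carries at most 256 sub-requests)::

    responses = await container_client.delete_blobs(
        "blob1",
        {"name": "blob2", "delete_snapshots": "include"},
        raise_on_any_failure=False,
    )
    async for response in responses:
        # Each sub-response reports the outcome for one blob, in order.
        print(response.status_code)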
""" if len(blobs) == 0: - return iter(list()) - - reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs) + return AsyncList([]) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + + reqs, options = _generate_delete_blobs_options( + self._query_str, + self.container_name, + self._client, + *blobs, + **kwargs + ) - return await self._batch_send(*reqs, **options) + return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) - @distributed_trace + @distributed_trace_async async def set_standard_blob_tier_blobs( - self, - standard_blob_tier: Union[str, 'StandardBlobTier'], - *blobs: List[Union[str, BlobProperties, dict]], - **kwargs + self, standard_blob_tier: Union[str, 'StandardBlobTier'], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any ) -> AsyncIterator[AsyncHttpResponse]: """This operation sets the tier on block blobs. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. + The maximum number of blobs that can be updated in a single request is 256. + :param standard_blob_tier: Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool', 'Archive'. The hot tier is optimized for storing data that is accessed @@ -997,6 +1451,7 @@ async def set_standard_blob_tier_blobs( .. note:: When the blob type is dict, here's a list of keys, value rules. + blob name: key: 'name', value type: str standard blob tier: @@ -1010,7 +1465,7 @@ async def set_standard_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob :keyword str if_tags_match_condition: @@ -1020,7 +1475,11 @@ async def set_standard_blob_tier_blobs( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. 
For optimal performance, @@ -1028,19 +1487,28 @@ async def set_standard_blob_tier_blobs( :return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] """ - reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + standard_blob_tier, + self._client, + *blobs, + **kwargs) - return await self._batch_send(*reqs, **options) + return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) - @distributed_trace + @distributed_trace_async async def set_premium_page_blob_tier_blobs( - self, - premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'], - *blobs: List[Union[str, BlobProperties, dict]], - **kwargs + self, premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any ) -> AsyncIterator[AsyncHttpResponse]: """Sets the page blob tiers on the blobs. This API is only supported for page blobs on premium accounts. + The maximum number of blobs that can be updated in a single request is 256. + :param premium_page_blob_tier: A page blob tier value to set on all blobs to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1066,11 +1534,13 @@ async def set_premium_page_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. For optimal performance, @@ -1078,25 +1548,35 @@ async def set_premium_page_blob_tier_blobs( :return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] """ - reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + premium_page_blob_tier, + self._client, + *blobs, + **kwargs) - return await self._batch_send(*reqs, **options) + return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) def get_blob_client( - self, blob, # type: Union[BlobProperties, str] - snapshot=None # type: str - ): - # type: (...) -> BlobClient + self, blob: str, + snapshot: Optional[str] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. :param str blob: The blob with which to interact.
- :type blob: str or ~azure.storage.blob.BlobProperties :param str snapshot: The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`~BlobClient.create_snapshot()`. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. :returns: A BlobClient. :rtype: ~azure.storage.blob.aio.BlobClient @@ -1109,14 +1589,23 @@ def get_blob_client( :dedent: 12 :caption: Get the blob client. """ - blob_name = _get_blob_name(blob) + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) + blob_name = blob.get('name') + else: + blob_name = blob _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable = protected-access ) return BlobClient( self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function, loop=self._loop) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_download_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_download_async.py index 44ba51d272d1..dab5afdca85d 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_download_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_download_async.py @@ -4,31 +4,54 @@ # license information. 
# -------------------------------------------------------------------------- # pylint: disable=invalid-overridden-method +# mypy: disable-error-code=override import asyncio +import codecs import sys -from io import BytesIO -from itertools import islice import warnings +from io import BytesIO, StringIO +from itertools import islice +from typing import ( + Any, AsyncIterator, Awaitable, + Generator, Callable, cast, Dict, + Generic, IO, Optional, overload, + Tuple, TypeVar, Union, TYPE_CHECKING +) + +from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError -from azure.core.exceptions import HttpResponseError -from .._shared.encryption import decrypt_blob from .._shared.request_handlers import validate_and_format_range_headers -from .._shared.response_handlers import process_storage_error, parse_length_from_content_range -from .._deserialize import get_page_ranges_result +from .._shared.response_handlers import parse_length_from_content_range, process_storage_error +from .._deserialize import deserialize_blob_properties, get_page_ranges_result from .._download import process_range_and_offset, _ChunkDownloader +from .._encryption import ( + adjust_blob_size_for_encryption, + decrypt_blob, + is_encryption_v2, + parse_encryption_data +) + +if TYPE_CHECKING: + from codecs import IncrementalDecoder + from .._encryption import _EncryptionData + from .._generated.aio import AzureBlobStorage + from .._models import BlobProperties + from .._shared.models import StorageConfiguration + + +T = TypeVar('T', bytes, str) -async def process_content(data, start_offset, end_offset, encryption): + +async def process_content(data: Any, start_offset: int, end_offset: int, encryption: Dict[str, Any]) -> bytes: if data is None: raise ValueError("Response cannot be None.") - try: - content = data.response.body() - except Exception as error: - raise HttpResponseError(message="Download stream interrupted.", response=data.response, error=error) + await data.response.read() + content = cast(bytes, data.response.content) if encryption.get('key') is not None or encryption.get('resolver') is not None: try: return decrypt_blob( - encryption.get('required'), + encryption.get('required') or False, encryption.get('key'), encryption.get('resolver'), content, @@ -39,153 +62,189 @@ async def process_content(data, start_offset, end_offset, encryption): raise HttpResponseError( message="Decryption failed.", response=data.response, - error=error) + error=error) from error return content class _AsyncChunkDownloader(_ChunkDownloader): - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(_AsyncChunkDownloader, self).__init__(**kwargs) - self.stream_lock = asyncio.Lock() if kwargs.get('parallel') else None - self.progress_lock = asyncio.Lock() if kwargs.get('parallel') else None + self.stream_lock_async = asyncio.Lock() if kwargs.get('parallel') else None + self.progress_lock_async = asyncio.Lock() if kwargs.get('parallel') else None - async def process_chunk(self, chunk_start): + async def process_chunk(self, chunk_start: int) -> None: chunk_start, chunk_end = self._calculate_range(chunk_start) - chunk_data = await self._download_chunk(chunk_start, chunk_end - 1) + chunk_data, _ = await self._download_chunk(chunk_start, chunk_end - 1) length = chunk_end - chunk_start if length > 0: await self._write_to_stream(chunk_data, chunk_start) await self._update_progress(length) - async def yield_chunk(self, chunk_start): + async def yield_chunk(self, chunk_start: int) -> Tuple[bytes, 
int]: chunk_start, chunk_end = self._calculate_range(chunk_start) return await self._download_chunk(chunk_start, chunk_end - 1) - async def _update_progress(self, length): - if self.progress_lock: - async with self.progress_lock: # pylint: disable=not-async-context-manager + async def _update_progress(self, length: int) -> None: + if self.progress_lock_async: + async with self.progress_lock_async: self.progress_total += length else: self.progress_total += length - async def _write_to_stream(self, chunk_data, chunk_start): - if self.stream_lock: - async with self.stream_lock: # pylint: disable=not-async-context-manager + if self.progress_hook: + await cast(Callable[[int, Optional[int]], Awaitable[Any]], self.progress_hook)( + self.progress_total, self.total_size) + + async def _write_to_stream(self, chunk_data: bytes, chunk_start: int) -> None: + if self.stream_lock_async: + async with self.stream_lock_async: self.stream.seek(self.stream_start + (chunk_start - self.start_index)) self.stream.write(chunk_data) else: self.stream.write(chunk_data) - async def _download_chunk(self, chunk_start, chunk_end): + async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes, int]: + if self.encryption_options is None: + raise ValueError("Required argument is missing: encryption_options") download_range, offset = process_range_and_offset( - chunk_start, chunk_end, chunk_end, self.encryption_options) + chunk_start, chunk_end, chunk_end, self.encryption_options, self.encryption_data + ) # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. # Do optimize and create empty chunk locally if condition is met. if self._do_optimize(download_range[0], download_range[1]): - chunk_data = b"\x00" * self.chunk_size + content_length = download_range[1] - download_range[0] + 1 + chunk_data = b"\x00" * content_length else: range_header, range_validation = validate_and_format_range_headers( download_range[0], download_range[1], check_content_md5=self.validate_content ) - try: - _, response = await self.client.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self.validate_content, - data_stream_total=self.total_size, - download_stream_current=self.progress_total, - **self.request_options - ) - except HttpResponseError as error: - process_storage_error(error) - chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = True + retry_total = 3 + while retry_active: + try: + _, response = await cast(Awaitable[Any], self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + )) + except HttpResponseError as error: + process_storage_error(error) + + try: + chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + await asyncio.sleep(1) + content_length = response.content_length # This makes sure that if_match is set so that we can validate # that subsequent downloads are to an unmodified blob if self.request_options.get('modified_access_conditions'): self.request_options['modified_access_conditions'].if_match = response.properties.etag - return chunk_data + return 
chunk_data, content_length class _AsyncChunkIterator(object): """Async iterator for chunks in blob download stream.""" - def __init__(self, size, content, downloader): + def __init__(self, size: int, content: bytes, downloader: Optional[_AsyncChunkDownloader], chunk_size: int) -> None: self.size = size + self._chunk_size = chunk_size self._current_content = content self._iter_downloader = downloader - self._iter_chunks = None - self._complete = (size == 0) + self._iter_chunks: Optional[Generator[int, None, None]] = None + self._complete = size == 0 - def __len__(self): + def __len__(self) -> int: return self.size - def __iter__(self): + def __iter__(self) -> None: raise TypeError("Async stream must be iterated asynchronously.") - def __aiter__(self): + def __aiter__(self) -> AsyncIterator[bytes]: return self - async def __anext__(self): - """Iterate through responses.""" + # Iterate through responses. + async def __anext__(self) -> bytes: if self._complete: raise StopAsyncIteration("Download complete") if not self._iter_downloader: - # If no iterator was supplied, the download completed with - # the initial GET, so we just return that data + # cut the data obtained from initial GET into chunks + if len(self._current_content) > self._chunk_size: + return self._get_chunk_data() self._complete = True return self._current_content if not self._iter_chunks: self._iter_chunks = self._iter_downloader.get_chunk_offsets() - else: - try: - chunk = next(self._iter_chunks) - except StopIteration: - raise StopAsyncIteration("Download complete") - self._current_content = await self._iter_downloader.yield_chunk(chunk) - return self._current_content + # initial GET result still has more than _chunk_size bytes of data + if len(self._current_content) >= self._chunk_size: + return self._get_chunk_data() + try: + chunk = next(self._iter_chunks) + self._current_content += (await self._iter_downloader.yield_chunk(chunk))[0] + except StopIteration as exc: + self._complete = True + # it's likely that there is some data left in self._current_content + if self._current_content: + return self._current_content + raise StopAsyncIteration("Download complete") from exc -class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes - """A streaming object to download from Azure Storage. + return self._get_chunk_data() - :ivar str name: - The name of the blob being downloaded. - :ivar str container: - The name of the container where the blob is. - :ivar ~azure.storage.blob.BlobProperties properties: - The properties of the blob being downloaded. If only a range of the data is being - downloaded, this will be reflected in the properties. - :ivar int size: - The size of the total data in the stream. This will be the byte range if speficied, - otherwise the total size of the blob. + def _get_chunk_data(self) -> bytes: + chunk_data = self._current_content[: self._chunk_size] + self._current_content = self._current_content[self._chunk_size:] + return chunk_data + + +class StorageStreamDownloader(Generic[T]): # pylint: disable=too-many-instance-attributes + """ + A streaming object to download from Azure Storage. """ + name: str + """The name of the blob being downloaded.""" + container: str + """The name of the container where the blob is.""" + properties: "BlobProperties" + """The properties of the blob being downloaded. If only a range of the data is being + downloaded, this will be reflected in the properties.""" + size: int + """The size of the total data in the stream.
This will be the byte range if specified, + otherwise the total size of the blob.""" + def __init__( - self, - clients=None, - config=None, - start_range=None, - end_range=None, - validate_content=None, - encryption_options=None, - max_concurrency=1, - name=None, - container=None, - encoding=None, - **kwargs - ): + self, + clients: "AzureBlobStorage" = None, # type: ignore [assignment] + config: "StorageConfiguration" = None, # type: ignore [assignment] + start_range: Optional[int] = None, + end_range: Optional[int] = None, + validate_content: bool = None, # type: ignore [assignment] + encryption_options: Dict[str, Any] = None, # type: ignore [assignment] + max_concurrency: int = 1, + name: str = None, # type: ignore [assignment] + container: str = None, # type: ignore [assignment] + encoding: Optional[str] = None, + download_cls: Optional[Callable] = None, + **kwargs: Any + ) -> None: self.name = name self.container = container - self.properties = None - self.size = None + self.size = 0 self._clients = clients self._config = config @@ -195,63 +254,98 @@ def __init__( self._encoding = encoding self._validate_content = validate_content self._encryption_options = encryption_options or {} + self._progress_hook = kwargs.pop('progress_hook', None) self._request_options = kwargs + self._response = None self._location_mode = None - self._download_complete = False - self._current_content = None - self._file_size = None + self._current_content: Union[str, bytes] = b'' + self._file_size = 0 self._non_empty_ranges = None - self._response = None + self._encryption_data: Optional["_EncryptionData"] = None + + # The content download offset, after any processing (decryption), in bytes + self._download_offset = 0 + # The raw download offset, before processing (decryption), in bytes + self._raw_download_offset = 0 + # The offset the stream has been read to in bytes or chars depending on mode + self._read_offset = 0 + # The offset into current_content that has been consumed in bytes or chars depending on mode + self._current_content_offset = 0 + + self._text_mode: Optional[bool] = None + self._decoder: Optional["IncrementalDecoder"] = None + # Whether the current content is the first chunk of download content or not + self._first_chunk = True + self._download_start = self._start_range or 0 + + # The cls is passed in via download_cls to avoid conflicting arg name with Generic.__new__ + # but needs to be changed to cls in the request options. + self._request_options['cls'] = download_cls + + def __len__(self): + return self.size + + async def _get_encryption_data_request(self) -> None: + # Save current request cls + download_cls = self._request_options.pop('cls', None) + # Adjust cls for get_properties + self._request_options['cls'] = deserialize_blob_properties + + properties = cast("BlobProperties", await self._clients.blob.get_properties(**self._request_options)) + # This will return None if there is no encryption metadata or there are parsing errors. + # That is acceptable here, the proper error will be caught and surfaced when attempting + # to decrypt the blob. + self._encryption_data = parse_encryption_data(properties.metadata) + + # Restore cls for download + self._request_options['cls'] = download_cls + + async def _setup(self) -> None: + if self._encryption_options.get("key") is not None or self._encryption_options.get("resolver") is not None: + await self._get_encryption_data_request() # The service only provides transactional MD5s for chunks under 4MB. 
# If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first # chunk so a transactional MD5 can be retrieved. - self._first_get_size = self._config.max_single_get_size if not self._validate_content \ - else self._config.max_chunk_get_size + first_get_size = ( + self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size + ) initial_request_start = self._start_range if self._start_range is not None else 0 - if self._end_range is not None and self._end_range - self._start_range < self._first_get_size: + if self._end_range is not None and self._end_range - initial_request_start < first_get_size: initial_request_end = self._end_range else: - initial_request_end = initial_request_start + self._first_get_size - 1 + initial_request_end = initial_request_start + first_get_size - 1 + # pylint: disable-next=attribute-defined-outside-init self._initial_range, self._initial_offset = process_range_and_offset( - initial_request_start, initial_request_end, self._end_range, self._encryption_options + initial_request_start, + initial_request_end, + self._end_range, + self._encryption_options, + self._encryption_data ) - def __len__(self): - return self.size - - async def _setup(self): self._response = await self._initial_request() - self.properties = self._response.properties + self.properties = cast("BlobProperties", self._response.properties) # type: ignore [attr-defined] self.properties.name = self.name self.properties.container = self.container - # Set the content length to the download size instead of the size of - # the last range + # Set the content length to the download size instead of the size of the last range self.properties.size = self.size - - # Overwrite the content range to the user requested range - self.properties.content_range = 'bytes {0}-{1}/{2}'.format( - self._start_range, - self._end_range, - self._file_size - ) + self.properties.content_range = (f"bytes {self._download_start}-" + f"{self._end_range if self._end_range is not None else self._file_size - 1}/" + f"{self._file_size}") # Overwrite the content MD5 as it is the MD5 for the last range instead # of the stored MD5 # TODO: Set to the stored MD5 when the service returns this - self.properties.content_md5 = None + self.properties.content_md5 = None # type: ignore [attr-defined] - if self.size == 0: - self._current_content = b"" - else: - self._current_content = await process_content( - self._response, - self._initial_offset[0], - self._initial_offset[1], - self._encryption_options - ) + @property + def _download_complete(self): + if is_encryption_v2(self._encryption_data): + return self._download_offset >= self.size + return self._raw_download_offset >= self.size async def _initial_request(self): range_header, range_validation = validate_and_format_range_headers( @@ -259,51 +353,80 @@ async def _initial_request(self): self._initial_range[1], start_range_required=False, end_range_required=False, - check_content_md5=self._validate_content) + check_content_md5=self._validate_content + ) - try: - location_mode, response = await self._clients.blob.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self._validate_content, - data_stream_total=None, - download_stream_current=0, - **self._request_options) - - # Check the location we read from to ensure we use the same one - # for subsequent requests. 
- self._location_mode = location_mode - - # Parse the total file size and adjust the download size if ranges - # were specified - self._file_size = parse_length_from_content_range(response.properties.content_range) - if self._end_range is not None: - # Use the length unless it is over the end of the file - self.size = min(self._file_size, self._end_range - self._start_range + 1) - elif self._start_range is not None: - self.size = self._file_size - self._start_range - else: - self.size = self._file_size + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = cast(Tuple[Optional[str], Any], await self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options + )) + + # Check the location we read from to ensure we use the same one + # for subsequent requests. + self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._file_size is None: + raise ValueError("Required Content-Range response header is missing or malformed.") + # Remove any extra encryption data size from blob size + self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) + + if self._end_range is not None and self._start_range is not None: + # Use the length unless it is over the end of the file + self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size - except HttpResponseError as error: - if self._start_range is None and error.response.status_code == 416: - # Get range will fail on an empty file. If the user did not - # request a range, do a regular get request in order to get - # any properties. - try: - _, response = await self._clients.blob.download( - validate_content=self._validate_content, - data_stream_total=0, - download_stream_current=0, - **self._request_options) - except HttpResponseError as error: + except HttpResponseError as error: + if self._start_range is None and error.response and error.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
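+ # (An empty blob has no satisfiable byte range, so the ranged GET returns 416; the unranged request below still succeeds and returns the blob's properties.)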
+ try: + _, response = cast(Tuple[Optional[Any], Any], await self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options)) + except HttpResponseError as e: + process_storage_error(e) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: process_storage_error(error) - # Set the download size to empty - self.size = 0 - self._file_size = 0 - else: - process_storage_error(error) + try: + if self.size == 0: + self._current_content = b"" + else: + self._current_content = await process_content( + response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + await asyncio.sleep(1) + self._download_offset += len(self._current_content) + self._raw_download_offset += response.content_length # get page ranges to optimize downloading sparse page blob if response.properties.blob_type == 'PageBlob': @@ -313,106 +436,266 @@ async def _initial_request(self): except HttpResponseError: pass - # If the file is small, the download is complete at this point. - # If file size is large, download the rest of the file in chunks. - if response.properties.size != self.size: - # Lock on the etag. This can be overriden by the user by specifying '*' - if self._request_options.get('modified_access_conditions'): - if not self._request_options['modified_access_conditions'].if_match: - self._request_options['modified_access_conditions'].if_match = response.properties.etag - else: - self._download_complete = True + if not self._download_complete and self._request_options.get("modified_access_conditions"): + self._request_options["modified_access_conditions"].if_match = response.properties.etag + return response - def chunks(self): - """Iterate over chunks in the download stream. + def chunks(self) -> AsyncIterator[bytes]: + """ + Iterate over chunks in the download stream. Note, the iterator returned will + iterate over the entire download content, regardless of any data that was + previously read. + + NOTE: If the stream has been partially read, some data may be re-downloaded by the iterator. + + :returns: An async iterator of the chunks in the download stream. + :rtype: AsyncIterator[bytes] + + .. admonition:: Example: - :rtype: Iterable[bytes] + .. literalinclude:: ../samples/blob_samples_hello_world_async.py + :start-after: [START download_a_blob_in_chunk] + :end-before: [END download_a_blob_in_chunk] + :language: python + :dedent: 16 + :caption: Download a blob using chunks(). """ - if self.size == 0 or self._download_complete: - iter_downloader = None - else: - data_end = self._file_size - if self._end_range is not None: - # Use the length unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. chunks is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with chunks as only bytes are supported.") + + iter_downloader = None + # If we still have the first chunk buffered, use it. 
Otherwise, download all content again + if not self._first_chunk or not self._download_complete: + if self._first_chunk: + start = self._download_start + len(self._current_content) + current_progress = len(self._current_content) + else: + start = self._download_start + current_progress = 0 + + end = self._download_start + self.size + iter_downloader = _AsyncChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # Start where the first download ended - end_range=data_end, - stream=None, - parallel=False, + current_progress=current_progress, + start_range=start, + end_range=end, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, - **self._request_options) + **self._request_options + ) + + initial_content = self._current_content if self._first_chunk else b'' return _AsyncChunkIterator( size=self.size, - content=self._current_content, - downloader=iter_downloader) + content=cast(bytes, initial_content), + downloader=iter_downloader, + chunk_size=self._config.max_chunk_get_size) - async def readall(self): - """Download the contents of this blob. + @overload + async def read(self, size: int = -1) -> T: + ... - This operation is blocking until all data is downloaded. - :rtype: bytes or str + @overload + async def read(self, *, chars: Optional[int] = None) -> T: + ... + + # pylint: disable-next=too-many-statements,too-many-branches + async def read(self, size: int = -1, *, chars: Optional[int] = None) -> T: """ - stream = BytesIO() - await self.readinto(stream) - data = stream.getvalue() - if self._encoding: - return data.decode(self._encoding) - return data + Read the specified bytes or chars from the stream. If `encoding` + was specified on `download_blob`, it is recommended to use the + chars parameter to read a specific number of chars to avoid decoding + errors. If size/chars is unspecified or negative all bytes will be read. + + :param int size: + The number of bytes to download from the stream. Leave unspecified + or set negative to download all bytes. + :keyword Optional[int] chars: + The number of chars to download from the stream. Leave unspecified + or set negative to download all chars. Note, this can only be used + when encoding is specified on `download_blob`. + :returns: + The requested data as bytes or a string if encoding was specified. If + the return value is empty, there is no more data to read. + :rtype: T + """ + if size > -1 and self._encoding: + warnings.warn( + "Size parameter specified with text encoding enabled. It is recommended to use chars " + "to read a specific number of characters instead." + ) + if size > -1 and chars is not None: + raise ValueError("Cannot specify both size and chars.") + if not self._encoding and chars is not None: + raise ValueError("Must specify encoding to read chars.") + if self._text_mode and size > -1: + raise ValueError("Stream has been partially read in text mode. Please use chars.") + if self._text_mode is False and chars is not None: + raise ValueError("Stream has been partially read in bytes mode. 
Please use size.") + + # Empty blob or already read to the end + if (size == 0 or chars == 0 or + (self._download_complete and self._current_content_offset >= len(self._current_content))): + return b'' if not self._encoding else '' # type: ignore [return-value] + + if not self._text_mode and chars is not None and self._encoding is not None: + self._text_mode = True + self._decoder = codecs.getincrementaldecoder(self._encoding)('strict') + self._current_content = self._decoder.decode( + cast(bytes, self._current_content), final=self._download_complete) + elif self._text_mode is None: + self._text_mode = False + + output_stream: Union[BytesIO, StringIO] + if self._text_mode: + output_stream = StringIO() + size = chars if chars else sys.maxsize + else: + output_stream = BytesIO() + size = size if size > 0 else sys.maxsize + readall = size == sys.maxsize + count = 0 + + # Start by reading from current_content + start = self._current_content_offset + length = min(len(self._current_content) - self._current_content_offset, size - count) + read = output_stream.write(self._current_content[start:start + length]) # type: ignore [arg-type] + + count += read + self._current_content_offset += read + self._read_offset += read + await self._check_and_report_progress() + + remaining = size - count + if remaining > 0 and not self._download_complete: + # Create a downloader that can download the rest of the file + start = self._download_start + self._download_offset + end = self._download_start + self.size + + parallel = self._max_concurrency > 1 + downloader = _AsyncChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._read_offset, + start_range=start, + end_range=end, + stream=output_stream, + parallel=parallel, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + encryption_data=self._encryption_data, + use_location=self._location_mode, + progress_hook=self._progress_hook, + **self._request_options + ) + self._first_chunk = False + + # When reading all data, have the downloader read everything into the stream. + # Else, read one chunk at a time (using the downloader as an iterator) until + # the requested size is reached. + chunks_iter = downloader.get_chunk_offsets() + if readall and not self._text_mode: + running_futures: Any = [ + asyncio.ensure_future(downloader.process_chunk(d)) + for d in islice(chunks_iter, 0, self._max_concurrency) + ] + while running_futures: + # Wait for some download to finish before adding a new one + done, running_futures = await asyncio.wait( + running_futures, return_when=asyncio.FIRST_COMPLETED) + try: + for task in done: + task.result() + except HttpResponseError as error: + process_storage_error(error) + try: + for _ in range(0, len(done)): + next_chunk = next(chunks_iter) + running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) + except StopIteration: + break + + if running_futures: + # Wait for the remaining downloads to finish + done, _running_futures = await asyncio.wait(running_futures) + try: + for task in done: + task.result() + except HttpResponseError as error: + process_storage_error(error) + + self._complete_read() - async def content_as_bytes(self, max_concurrency=1): - """Download the contents of this file.
+ else: + while (chunk := next(chunks_iter, None)) is not None and remaining > 0: + chunk_data, content_length = await downloader.yield_chunk(chunk) + self._download_offset += len(chunk_data) + self._raw_download_offset += content_length + if self._text_mode and self._decoder is not None: + self._current_content = self._decoder.decode(chunk_data, final=self._download_complete) + else: + self._current_content = chunk_data + + if remaining < len(self._current_content): + read = output_stream.write(self._current_content[:remaining]) # type: ignore [arg-type] + else: + read = output_stream.write(self._current_content) # type: ignore [arg-type] + + self._current_content_offset = read + self._read_offset += read + remaining -= read + await self._check_and_report_progress() + + data = output_stream.getvalue() + if not self._text_mode and self._encoding: + try: + # This is technically incorrect to do, but we have it for backwards compatibility. + data = cast(bytes, data).decode(self._encoding) + except UnicodeDecodeError: + warnings.warn( + "Encountered a decoding error while decoding blob data from a partial read. " + "Try using the `chars` keyword instead to read in text mode." + ) + raise - This operation is blocking until all data is downloaded. + return data # type: ignore [return-value] - :keyword int max_concurrency: - The number of parallel connections with which to download. - :rtype: bytes + async def readall(self) -> T: """ - warnings.warn( - "content_as_bytes is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - return await self.readall() - - async def content_as_text(self, max_concurrency=1, encoding="UTF-8"): - """Download the contents of this blob, and decode as text. - + Read the entire contents of this blob. This operation is blocking until all data is downloaded. - :param int max_concurrency: - The number of parallel connections with which to download. - :param str encoding: - Test encoding to decode the downloaded bytes. Default is UTF-8. - :rtype: str + :returns: The requested data as bytes or a string if encoding was specified. + :rtype: T """ - warnings.warn( - "content_as_text is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - self._encoding = encoding - return await self.readall() + return await self.read() - async def readinto(self, stream): + async def readinto(self, stream: IO[bytes]) -> int: """Download the contents of this blob to a stream. - :param stream: + :param IO[bytes] stream: The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. :returns: The number of bytes read. :rtype: int """ + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. 
readinto is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with readinto as only byte streams are supported.") + # the stream must be seekable if parallel download is required parallel = self._max_concurrency > 1 if parallel: @@ -422,39 +705,55 @@ async def readinto(self, stream): try: stream.seek(stream.tell()) - except (NotImplementedError, AttributeError): - raise ValueError(error_message) - - # Write the content to the user stream - stream.write(self._current_content) + except (NotImplementedError, AttributeError) as exc: + raise ValueError(error_message) from exc + + # If some data has been streamed using `read`, only stream the remaining data + remaining_size = self.size - self._read_offset + # Already read to the end + if remaining_size <= 0: + return 0 + + # Write the current content to the user stream + current_remaining = len(self._current_content) - self._current_content_offset + start = self._current_content_offset + count = stream.write(cast(bytes, self._current_content[start:start + current_remaining])) + + self._current_content_offset += count + self._read_offset += count + if self._progress_hook: + await self._progress_hook(self._read_offset, self.size) + + # If all the data was already downloaded/buffered if self._download_complete: - return self.size + return remaining_size - data_end = self._file_size - if self._end_range is not None: - # Use the length unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + data_start = self._download_start + self._read_offset + data_end = self._download_start + self.size downloader = _AsyncChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # start where the first download ended + current_progress=self._read_offset, + start_range=data_start, end_range=data_end, stream=stream, parallel=parallel, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, - **self._request_options) + progress_hook=self._progress_hook, + **self._request_options + ) dl_tasks = downloader.get_chunk_offsets() - running_futures = [ + running_futures = { asyncio.ensure_future(downloader.process_chunk(d)) for d in islice(dl_tasks, 0, self._max_concurrency) - ] + } while running_futures: # Wait for some download to finish before adding a new one done, running_futures = await asyncio.wait( @@ -465,11 +764,11 @@ async def readinto(self, stream): except HttpResponseError as error: process_storage_error(error) try: - next_chunk = next(dl_tasks) + for _ in range(0, len(done)): + next_chunk = next(dl_tasks) + running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) except StopIteration: break - else: - running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) if running_futures: # Wait for the remaining downloads to finish @@ -479,12 +778,79 @@ async def readinto(self, stream): task.result() except HttpResponseError as error: process_storage_error(error) - return self.size + + self._complete_read() + return remaining_size + + def _complete_read(self): + """Adjusts all offsets to the end of the download.""" + self._download_offset = self.size + self._raw_download_offset = self.size + self._read_offset = self.size + self._current_content_offset = 
len(self._current_content) + + async def _check_and_report_progress(self): + """Reports progress if necessary.""" + # Only report progress at the end of each chunk and use download_offset to always report + # progress in terms of (approximate) byte count. + if self._progress_hook and self._current_content_offset == len(self._current_content): + await self._progress_hook(self._download_offset, self.size) + + async def content_as_bytes(self, max_concurrency=1): + """DEPRECATED: Download the contents of this file. + + This operation is blocking until all data is downloaded. + + This method is deprecated, use :func:`readall` instead. + + :param int max_concurrency: + The number of parallel connections with which to download. + :returns: The contents of the file as bytes. + :rtype: bytes + """ + warnings.warn( + "content_as_bytes is deprecated, use readall instead", + DeprecationWarning + ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "content_as_bytes is not supported in text mode.") + + self._max_concurrency = max_concurrency + return await self.readall() + + async def content_as_text(self, max_concurrency=1, encoding="UTF-8"): + """DEPRECATED: Download the contents of this blob, and decode as text. + + This operation is blocking until all data is downloaded. + + This method is deprecated, use :func:`readall` instead. + + :param int max_concurrency: + The number of parallel connections with which to download. + :param str encoding: + Text encoding to decode the downloaded bytes. Default is UTF-8. + :returns: The content of the file as a str. + :rtype: str + """ + warnings.warn( + "content_as_text is deprecated, use readall instead", + DeprecationWarning + ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "content_as_text is not supported in text mode.") + + self._max_concurrency = max_concurrency + self._encoding = encoding + return await self.readall() async def download_to_stream(self, stream, max_concurrency=1): - """Download the contents of this blob to a stream. + """DEPRECATED: Download the contents of this blob to a stream. - :param stream: + This method is deprecated, use :func:`readinto` instead. + + :param IO[T] stream: The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. @@ -497,6 +863,10 @@ async def download_to_stream(self, stream, max_concurrency=1): "download_to_stream is deprecated, use readinto instead", DeprecationWarning ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "download_to_stream is not supported in text mode.") + self._max_concurrency = max_concurrency await self.readinto(stream) return self.properties diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_encryption_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_encryption_async.py new file mode 100644 index 000000000000..97334d96da59 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_encryption_async.py @@ -0,0 +1,72 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License.
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import inspect +import sys +from io import BytesIO +from typing import IO + +from .._encryption import _GCM_REGION_DATA_LENGTH, encrypt_data_v2 + + +class GCMBlobEncryptionStream: + """ + An async stream that performs AES-GCM encryption on the given data as + it's streamed. Data is read and encrypted in regions. The stream + will use the same encryption key and will generate a guaranteed unique + nonce for each encryption region. + """ + def __init__( + self, content_encryption_key: bytes, + data_stream: IO[bytes], + ) -> None: + """ + :param bytes content_encryption_key: The encryption key to use. + :param IO[bytes] data_stream: The data stream to read data from. + """ + self.content_encryption_key = content_encryption_key + self.data_stream = data_stream + + self.offset = 0 + self.current = b'' + self.nonce_counter = 0 + + async def read(self, size: int = -1) -> bytes: + """ + Read data from the stream. Specify -1 to read all available data. + + :param int size: The amount of data to read. Defaults to -1 for all data. + :return: The bytes read. + :rtype: bytes + """ + result = BytesIO() + remaining = sys.maxsize if size == -1 else size + + while remaining > 0: + # Start by reading from current + if len(self.current) > 0: + read = min(remaining, len(self.current)) + result.write(self.current[:read]) + + self.current = self.current[read:] + self.offset += read + remaining -= read + + if remaining > 0: + # Read one region of data and encrypt it + data = self.data_stream.read(_GCM_REGION_DATA_LENGTH) + if inspect.isawaitable(data): + data = await data + + if len(data) == 0: + # No more data to read + break + + self.current = encrypt_data_v2(data, self.nonce_counter, self.content_encryption_key) + # IMPORTANT: Must increment the nonce each time. + self.nonce_counter += 1 + + return result.getvalue() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_lease_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_lease_async.py index 79e67337dd62..b5bfad95f53f 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_lease_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_lease_async.py @@ -3,51 +3,57 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method +# pylint: disable=docstring-keyword-should-match-keyword-only -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, - TypeVar, TYPE_CHECKING -) +import uuid +from typing import Any, Optional, Union, TYPE_CHECKING from azure.core.exceptions import HttpResponseError from azure.core.tracing.decorator_async import distributed_trace_async -from .._shared.response_handlers import return_response_headers, process_storage_error +from .._shared.response_handlers import process_storage_error, return_response_headers from .._serialize import get_modify_conditions -from .._lease import BlobLeaseClient as LeaseClientBase if TYPE_CHECKING: + from azure.storage.blob.aio import BlobClient, ContainerClient from datetime import datetime - from .._generated.operations import BlobOperations, ContainerOperations - BlobClient = TypeVar("BlobClient") - ContainerClient = TypeVar("ContainerClient") -class BlobLeaseClient(LeaseClientBase): +class BlobLeaseClient(): # pylint: disable=client-accepts-api-version-keyword """Creates a new BlobLeaseClient. This client provides lease operations on a BlobClient or ContainerClient. - - :ivar str id: - The ID of the lease currently being maintained. This will be `None` if no - lease has yet been acquired. - :ivar str etag: - The ETag of the lease currently being maintained. This will be `None` if no - lease has yet been acquired or modified. - :ivar ~datetime.datetime last_modified: - The last modified timestamp of the lease currently being maintained. - This will be `None` if no lease has yet been acquired or modified. - - :param client: - The client of the blob or container to lease. - :type client: ~azure.storage.blob.aio.BlobClient or - ~azure.storage.blob.aio.ContainerClient - :param str lease_id: - A string representing the lease ID of an existing lease. This value does not - need to be specified in order to acquire a new lease, or break one. + :param client: The client of the blob or container to lease. + :type client: Union[BlobClient, ContainerClient] + :param lease_id: A string representing the lease ID of an existing lease. This value does not need to be + specified in order to acquire a new lease, or break one. + :type lease_id: Optional[str] """ + id: str + """The ID of the lease currently being maintained. This will be `None` if no + lease has yet been acquired.""" + etag: Optional[str] + """The ETag of the lease currently being maintained. This will be `None` if no + lease has yet been acquired or modified.""" + last_modified: Optional["datetime"] + """The last modified timestamp of the lease currently being maintained. 
+ This will be `None` if no lease has yet been acquired or modified.""" + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs + self, client: Union["BlobClient", "ContainerClient"], + lease_id: Optional[str] = None + ) -> None: + self.id = lease_id or str(uuid.uuid4()) + self.last_modified = None + self.etag = None + if hasattr(client, 'blob_name'): + self._client = client._client.blob + elif hasattr(client, 'container_name'): + self._client = client._client.container + else: + raise TypeError("Lease must use either BlobClient or ContainerClient.") + def __enter__(self): raise TypeError("Async lease must use 'async with'.") @@ -61,8 +67,7 @@ async def __aexit__(self, *args): await self.release() @distributed_trace_async - async def acquire(self, lease_duration=-1, **kwargs): - # type: (int, Any) -> None + async def acquire(self, lease_duration: int = -1, **kwargs: Any) -> None: """Requests a new lease. If the container does not have an active lease, the Blob service creates a @@ -97,12 +102,16 @@ async def acquire(self, lease_duration=-1, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.acquire_lease( + response: Any = await self._client.acquire_lease( timeout=kwargs.pop('timeout', None), duration=lease_duration, proposed_lease_id=self.id, @@ -111,13 +120,12 @@ async def acquire(self, lease_duration=-1, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime - self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') + self.etag = response.get('etag') @distributed_trace_async - async def renew(self, **kwargs): - # type: (Any) -> None + async def renew(self, **kwargs: Any) -> None: """Renews the lease. The lease can be renewed if the lease ID specified in the @@ -150,12 +158,16 @@ async def renew(self, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.renew_lease( + response: Any = await self._client.renew_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -163,13 +175,12 @@ async def renew(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace_async - async def release(self, **kwargs): - # type: (Any) -> None + async def release(self, **kwargs: Any) -> None: """Release the lease. The lease may be released if the client lease id specified matches @@ -200,12 +211,16 @@ async def release(self, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.release_lease( + response: Any = await self._client.release_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -213,13 +228,12 @@ async def release(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace_async - async def change(self, proposed_lease_id, **kwargs): - # type: (str, Any) -> None + async def change(self, proposed_lease_id: str, **kwargs: Any) -> None: """Change the lease ID of an active lease. :param str proposed_lease_id: @@ -249,12 +263,16 @@ async def change(self, proposed_lease_id, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.change_lease( + response: Any = await self._client.change_lease( lease_id=self.id, proposed_lease_id=proposed_lease_id, timeout=kwargs.pop('timeout', None), @@ -263,13 +281,12 @@ async def change(self, proposed_lease_id, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace_async - async def break_lease(self, lease_break_period=None, **kwargs): - # type: (Optional[int], Any) -> int + async def break_lease(self, lease_break_period: Optional[int] = None, **kwargs: Any) -> int: """Break the lease, if the container or blob has an active lease. Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; @@ -308,13 +325,17 @@ async def break_lease(self, lease_break_period=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: Approximate time remaining in the lease period, in seconds. :rtype: int """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.break_lease( + response: Any = await self._client.break_lease( timeout=kwargs.pop('timeout', None), break_period=lease_break_period, modified_access_conditions=mod_conditions, diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_list_blobs_helper.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_list_blobs_helper.py index 058572fd270d..1731a3186c40 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_list_blobs_helper.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_list_blobs_helper.py @@ -1,56 +1,65 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information.
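(For orientation before the next file: a minimal sketch of the lease flow the rewritten async client above supports, going through the public azure.storage.blob.aio surface that this vendored copy mirrors. The connection string, container, and blob names are placeholders.)

```python
import asyncio

from azure.storage.blob.aio import BlobClient


async def lease_demo() -> None:
    # Placeholder connection string and names -- substitute real values.
    blob = BlobClient.from_connection_string(
        "<storage-connection-string>", container_name="mycontainer", blob_name="myblob")
    async with blob:
        # acquire() populates lease.id, lease.etag, and lease.last_modified.
        lease = await blob.acquire_lease(lease_duration=15)
        try:
            await lease.renew()    # restarts the 15-second lease clock
        finally:
            await lease.release()  # lets other clients lease the blob again


asyncio.run(lease_demo())
```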
# -------------------------------------------------------------------------- -from azure.core.async_paging import AsyncPageIterator, AsyncItemPaged +from typing import Callable, List, Optional +from urllib.parse import unquote + +from azure.core.async_paging import AsyncItemPaged, AsyncPageIterator from azure.core.exceptions import HttpResponseError -from .._deserialize import get_blob_properties_from_generated_code -from .._models import BlobProperties + +from .._deserialize import ( + get_blob_properties_from_generated_code, + load_many_xml_nodes, + load_xml_int, + load_xml_string +) from .._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix +from .._models import BlobProperties from .._shared.models import DictMixin -from .._shared.response_handlers import return_context_and_deserialized, process_storage_error +from .._shared.response_handlers import ( + process_storage_error, + return_context_and_deserialized, + return_raw_deserialized +) class BlobPropertiesPaged(AsyncPageIterator): - """An Iterable of Blob properties. + """An Iterable of Blob properties.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.models.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - - :param callable command: Function to retrieve the next page of items. - :param str container: The container that the blobs are listed from. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str continuation_token: An opaque continuation token. - :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. 
- """ def __init__( - self, command, - container=None, - prefix=None, - results_per_page=None, - continuation_token=None, - delimiter=None, - location_mode=None): + self, command: Callable, + container: Optional[str] = None, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + delimiter: Optional[str] = None, + location_mode: Optional[str] = None, + ) -> None: super(BlobPropertiesPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -92,42 +101,115 @@ def _build_item(self, item): if isinstance(item, BlobProperties): return item if isinstance(item, BlobItemInternal): - blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access - blob.container = self.container + blob = get_blob_properties_from_generated_code(item) + blob.container = self.container # type: ignore [assignment] return blob return item +class BlobNamesPaged(AsyncPageIterator): + """An Iterable of Blob names.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of blobs to retrieve per call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + + def __init__( + self, command: Callable, + container: Optional[str] = None, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: + super(BlobNamesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.container = container + self.current_page = None + self.location_mode = location_mode + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + prefix=self.prefix, + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_raw_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.get('ServiceEndpoint') + self.prefix = load_xml_string(self._response, 'Prefix') + self.marker = load_xml_string(self._response, 'Marker') + self.results_per_page = load_xml_int(self._response, 'MaxResults') + self.container = self._response.get('ContainerName') + + blobs = load_many_xml_nodes(self._response, 'Blob', wrapper='Blobs') + self.current_page = [load_xml_string(blob, 'Name') for blob in blobs] + + next_marker = load_xml_string(self._response, 'NextMarker') + return next_marker or None, self.current_page 
+ + class BlobPrefix(AsyncItemPaged, DictMixin): """An Iterable of Blob properties. Returned from walk_blobs when a delimiter is used. - Can be thought of as a virtual blob directory. - - :ivar str name: The prefix, or "directory name" of the blob. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str marker: The continuation token of the current page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.models.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str marker: An opaque continuation token. - :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ + Can be thought of as a virtual blob directory.""" + + name: str + """The prefix, or "directory name" of the blob.""" + service_endpoint: Optional[str] + """The service URL.""" + prefix: str + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + next_marker: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: str + """The location mode being used to list results. 
The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + delimiter: str + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + container: str + """The name of the container.""" + def __init__(self, *args, **kwargs): super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) self.name = kwargs.get('prefix') @@ -154,10 +236,14 @@ async def _extract_data_cb(self, get_next_return): def _build_item(self, item): item = super(BlobPrefixPaged, self)._build_item(item) if isinstance(item, GenBlobPrefix): + if item.name.encoded: + name = unquote(item.name.content) + else: + name = item.name.content return BlobPrefix( self._command, container=self.container, - prefix=item.name, + prefix=name, results_per_page=self.results_per_page, location_mode=self.location_mode) return item diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_models.py index 05edd78e0d9d..27d1d8fa3c0b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_models.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_models.py @@ -3,38 +3,49 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=too-few-public-methods, too-many-instance-attributes -# pylint: disable=super-init-not-called, too-many-lines +# pylint: disable=too-few-public-methods + +from typing import Callable, List, Optional, TYPE_CHECKING from azure.core.async_paging import AsyncPageIterator from azure.core.exceptions import HttpResponseError -from .._deserialize import parse_tags - -from .._models import ContainerProperties, FilteredBlob -from .._shared.response_handlers import return_context_and_deserialized, process_storage_error +from .._deserialize import parse_tags from .._generated.models import FilterBlobItem +from .._models import ContainerProperties, FilteredBlob, parse_page_list +from .._shared.response_handlers import process_storage_error, return_context_and_deserialized + +if TYPE_CHECKING: + from .._models import BlobProperties class ContainerPropertiesPaged(AsyncPageIterator): """An Iterable of Container properties. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A container name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.models.ContainerProperties) - - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only containers whose names + :param Callable command: Function to retrieve the next page of items. 
+ :param Optional[str] prefix: Filters the results to return only containers whose names begin with the specified prefix. - :param int results_per_page: The maximum number of container names to retrieve per + :param Optional[int] results_per_page: The maximum number of container names to retrieve per call. - :param str continuation_token: An opaque continuation token. + :param Optional[str] continuation_token: An opaque continuation token. """ + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A container name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: List[ContainerProperties] + """The current page of listed results.""" + def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): super(ContainerPropertiesPaged, self).__init__( get_next=self._get_next_cb, @@ -77,32 +88,41 @@ def _build_item(item): class FilteredBlobPaged(AsyncPageIterator): """An Iterable of Blob properties. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.BlobProperties) - :ivar str container: The container that the blobs are listed from. - - :param callable command: Function to retrieve the next page of items. - :param str container: The name of the container. - :param int results_per_page: The maximum number of blobs to retrieve per + :param Callable command: Function to retrieve the next page of items. + :param Optional[str] container: The name of the container. + :param Optional[int] results_per_page: The maximum number of blobs to retrieve per call. - :param str continuation_token: An opaque continuation token. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. + :param Optional[str] continuation_token: An opaque continuation token. + :param Optional[str] location_mode: + Specifies the location the request should be sent to. This mode only applies for RA-GRS accounts + which allow secondary read access. Options include 'primary' or 'secondary'. 
""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List["BlobProperties"]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + def __init__( - self, command, - container=None, - results_per_page=None, - continuation_token=None, - location_mode=None): + self, command: Callable, + container: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: super(FilteredBlobPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -141,3 +161,39 @@ def _build_item(item): blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags) return blob return item + + +class PageRangePaged(AsyncPageIterator): + def __init__(self, command, results_per_page=None, continuation_token=None): + super(PageRangePaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.current_page = self._build_page(self._response) + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_page(response): + if not response: + raise StopIteration + + return parse_page_list(response) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_upload_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_upload_helpers.py index 36d1e4498e5e..794beee36e3b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_upload_helpers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/aio/_upload_helpers.py @@ -3,71 +3,87 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=no-self-use +import inspect from io import SEEK_SET, UnsupportedOperation -from typing import Optional, Union, Any, TypeVar, TYPE_CHECKING # pylint: disable=unused-import +from typing import Any, cast, Dict, IO, Optional, TypeVar, TYPE_CHECKING -import six -from azure.core.exceptions import ResourceModifiedError, HttpResponseError +from azure.core.exceptions import HttpResponseError, ResourceModifiedError -from .._shared.response_handlers import ( - process_storage_error, - return_response_headers) +from ._encryption_async import GCMBlobEncryptionStream +from .._encryption import ( + encrypt_blob, + get_adjusted_upload_size, + get_blob_encryptor_and_padder, + generate_blob_encryption_data, + _ENCRYPTION_PROTOCOL_V1, + _ENCRYPTION_PROTOCOL_V2 +) +from .._generated.models import ( + AppendPositionAccessConditions, + BlockLookupList, + ModifiedAccessConditions +) +from .._shared.response_handlers import process_storage_error, return_response_headers from .._shared.uploads_async import ( - upload_data_chunks, - upload_substream_blocks, + AppendBlobChunkUploader, BlockBlobChunkUploader, PageBlobChunkUploader, - AppendBlobChunkUploader) -from .._shared.encryption import generate_blob_encryption_data, encrypt_blob -from .._generated.models import ( - BlockLookupList, - AppendPositionAccessConditions, - ModifiedAccessConditions, + upload_data_chunks, + upload_substream_blocks ) -from .._upload_helpers import _convert_mod_error, _any_conditions +from .._upload_helpers import _any_conditions, _convert_mod_error if TYPE_CHECKING: - from datetime import datetime # pylint: disable=unused-import + from .._generated.aio.operations import AppendBlobOperations, BlockBlobOperations, PageBlobOperations + from .._shared.models import StorageConfiguration BlobLeaseClient = TypeVar("BlobLeaseClient") -async def upload_block_blob( # pylint: disable=too-many-locals - client=None, - data=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): +async def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements + client: "BlockBlobOperations", + stream: IO, + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + validate_content: bool, + max_concurrency: Optional[int], + length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' adjusted_count = length if (encryption_options.get('key') is not None) and (adjusted_count is not None): - adjusted_count += (16 - (length % 16)) + adjusted_count = get_adjusted_upload_size(adjusted_count, encryption_options['version']) blob_headers = kwargs.pop('blob_headers', None) tier = kwargs.pop('standard_blob_tier', None) blob_tags_string = kwargs.pop('blob_tags_string', None) + immutability_policy = kwargs.pop('immutability_policy', None) + immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time + immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode + legal_hold = kwargs.pop('legal_hold', None) + progress_hook = kwargs.pop('progress_hook', None) + # Do single put if the size is smaller than config.max_single_put_size if adjusted_count is not None and (adjusted_count <= 
blob_settings.max_single_put_size): - try: - data = data.read(length) - if not isinstance(data, six.binary_type): - raise TypeError('Blob data should be of type bytes.') - except AttributeError: - pass + data = stream.read(length or -1) + if inspect.isawaitable(data): + data = await data + if not isinstance(data, bytes): + raise TypeError('Blob data should be of type bytes.') + if encryption_options.get('key'): - encryption_data, data = encrypt_blob(data, encryption_options['key']) + if not isinstance(data, bytes): + raise TypeError('Blob data should be of type bytes.') + encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version']) headers['x-ms-meta-encryptiondata'] = encryption_data - return await client.upload( - body=data, + + response = cast(Dict[str, Any], await client.upload( + body=data, # type: ignore [arg-type] content_length=adjusted_count, blob_http_headers=blob_headers, headers=headers, @@ -77,7 +93,15 @@ async def upload_block_blob( # pylint: disable=too-many-locals upload_stream_current=0, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, - **kwargs) + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs)) + + if progress_hook: + await progress_hook(adjusted_count, adjusted_count) + + return response use_original_upload_path = blob_settings.use_byte_buffer or \ validate_content or encryption_options.get('required') or \ @@ -86,20 +110,37 @@ async def upload_block_blob( # pylint: disable=too-many-locals not hasattr(stream, 'seek') or not hasattr(stream, 'tell') if use_original_upload_path: - if encryption_options.get('key'): - cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key']) - headers['x-ms-meta-encryptiondata'] = encryption_data - encryption_options['cek'] = cek - encryption_options['vector'] = iv + total_size = length + encryptor, padder = None, None + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_metadata = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_metadata + + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, True) + + # Adjust total_size for encryption V2 + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V2: + # Adjust total_size for encryption V2 + total_size = adjusted_count + # V2 wraps the data stream with an encryption stream + if cek is None: + raise ValueError("Generate encryption metadata failed. 
'cek' is None.") + stream = GCMBlobEncryptionStream(cek, stream) # type: ignore [assignment] + block_ids = await upload_data_chunks( service=client, uploader_class=BlockBlobChunkUploader, - total_size=length, + total_size=total_size, chunk_size=blob_settings.max_block_size, max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, + encryptor=encryptor, + padder=padder, headers=headers, **kwargs ) @@ -112,13 +153,14 @@ async def upload_block_blob( # pylint: disable=too-many-locals max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, + progress_hook=progress_hook, headers=headers, **kwargs ) block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) block_lookup.latest = block_ids - return await client.commit_block_list( + return cast(Dict[str, Any], await client.commit_block_list( block_lookup, blob_http_headers=blob_headers, cls=return_response_headers, @@ -126,7 +168,10 @@ async def upload_block_blob( # pylint: disable=too-many-locals headers=headers, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, - **kwargs) + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs)) except HttpResponseError as error: try: process_storage_error(error) @@ -137,48 +182,63 @@ async def upload_block_blob( # pylint: disable=too-many-locals async def upload_page_blob( - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "PageBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' if length is None or length < 0: raise ValueError("A content length must be specified for a Page Blob.") if length % 512 != 0: - raise ValueError("Invalid page blob size: {0}. " - "The size must be aligned to a 512-byte boundary.".format(length)) + raise ValueError(f"Invalid page blob size: {length}. 
" + "The size must be aligned to a 512-byte boundary.") + tier = None if kwargs.get('premium_page_blob_tier'): premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') try: - headers['x-ms-access-tier'] = premium_page_blob_tier.value + tier = premium_page_blob_tier.value except AttributeError: - headers['x-ms-access-tier'] = premium_page_blob_tier - if encryption_options and encryption_options.get('data'): - headers['x-ms-meta-encryptiondata'] = encryption_options['data'] + tier = premium_page_blob_tier + + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_data = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_data + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) - response = await client.create( + response = cast(Dict[str, Any], await client.create( content_length=0, blob_content_length=length, - blob_sequence_number=None, + blob_sequence_number=None, # type: ignore [arg-type] blob_http_headers=kwargs.pop('blob_headers', None), blob_tags_string=blob_tags_string, + tier=tier, cls=return_response_headers, headers=headers, - **kwargs) + **kwargs)) if length == 0: - return response + return cast(Dict[str, Any], response) + + if encryption_options and encryption_options.get('key'): + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, False) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) - return await upload_data_chunks( + return cast(Dict[str, Any], await upload_data_chunks( service=client, uploader_class=PageBlobChunkUploader, total_size=length, @@ -186,9 +246,9 @@ async def upload_page_blob( stream=stream, max_concurrency=max_concurrency, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: try: @@ -200,16 +260,17 @@ async def upload_page_blob( async def upload_append_blob( # pylint: disable=unused-argument - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "AppendBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if length == 0: return {} @@ -218,6 +279,7 @@ async def upload_append_blob( # pylint: disable=unused-argument max_size=kwargs.pop('maxsize_condition', None), append_position=None) blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) try: if overwrite: @@ -227,7 +289,7 @@ async def upload_append_blob( # pylint: disable=unused-argument headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return await upload_data_chunks( + return cast(Dict[str, Any], await upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -236,26 +298,27 @@ async def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, 
append_position_access_conditions=append_conditions, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: - if error.response.status_code != 404: + if error.response.status_code != 404: # type: ignore [union-attr] raise # rewind the request body if it is a stream if hasattr(stream, 'read'): try: # attempt to rewind the body to the initial position stream.seek(0, SEEK_SET) - except UnsupportedOperation: + except UnsupportedOperation as exc: # if body is not seekable, then retry would not work - raise error + raise error from exc await client.create( content_length=0, blob_http_headers=blob_headers, headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return await upload_data_chunks( + return cast(Dict[str, Any], await upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -264,7 +327,8 @@ async def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, append_position_access_conditions=append_conditions, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/py.typed b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/storage/blob/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/setup.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/setup.py index f6df420e3e0e..12d856074b95 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/setup.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/setup.py @@ -74,9 +74,10 @@ python_requires=">=3.8", install_requires=[ # dependencies for the vendored storage blob - "azure-core>=1.20.1", - "msrest>=0.6.18", + "azure-core>=1.30.0", "cryptography>=2.1.4", + "typing-extensions>=4.6.0", + "isodate>=0.6.1", # end of dependencies for the vendored storage blob 'azure-eventhub>=5.0.0', 'aiohttp>=3.8.3', diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/CHANGELOG.md b/sdk/eventhub/azure-eventhub-checkpointstoreblob/CHANGELOG.md index b19f65ea9305..61fa3f45dd3c 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/CHANGELOG.md +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/CHANGELOG.md @@ -9,6 +9,11 @@ This version and all future versions will require Python 3.7+. Python 2.7 and 3. - Fixed a bug with `BlobCheckpointStore.claim_ownership` mutating the `ownership_list` argument to no longer mutate the argument. - Updated `azure-core` dependency to 1.20.1 to fix `cchardet` ImportError. +### Other Changes + +- Updated vendor azure-storage-blob dependency to v12.24.0. + - Fixed typing/linting issues and other bugs. See azure-storage-blob CHANGELOG.md for more info. + ## 1.1.4 (2021-04-07) This version and all future versions will require Python 2.7 or Python 3.6+, Python 3.5 is no longer supported. 
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/__init__.py index 59cb70146572..0d1f7edf5dc6 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: str +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/__init__.py index 59cb70146572..0d1f7edf5dc6 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: str +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/__init__.py index 9164961ea10a..2386595611bd 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/__init__.py @@ -3,9 +3,11 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only + import os -from typing import Union, Iterable, AnyStr, IO, Any, Dict # pylint: disable=unused-import +from typing import Any, AnyStr, cast, Dict, IO, Iterable, Optional, Union, TYPE_CHECKING from ._version import VERSION from ._blob_client import BlobClient from ._container_client import ContainerClient @@ -16,21 +18,21 @@ from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas from ._shared.policies import ExponentialRetry, LinearRetry from ._shared.response_handlers import PartialBatchErrorException -from ._shared.models import( +from ._shared.models import ( LocationMode, ResourceTypes, AccountSasPermissions, StorageErrorCode, - UserDelegationKey -) -from ._generated.models import ( - RehydratePriority + UserDelegationKey, + Services ) +from ._generated.models import RehydratePriority from ._models import ( BlobType, BlockState, StandardBlobTier, PremiumPageBlobTier, + BlobImmutabilityPolicyMode, SequenceNumberAction, PublicAccess, BlobAnalyticsLogging, @@ -54,22 +56,27 @@ BlobQueryError, DelimitedJsonDialect, DelimitedTextDialect, + QuickQueryDialect, ArrowDialect, ArrowType, ObjectReplicationPolicy, - ObjectReplicationRule + ObjectReplicationRule, + ImmutabilityPolicy, ) from ._list_blobs_helper import BlobPrefix +if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + __version__ = VERSION def upload_blob_to_url( - blob_url, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - credential=None, # type: Any - **kwargs): - # type: (...) -> Dict[str, Any] + blob_url: str, + data: Union[Iterable[AnyStr], IO[AnyStr]], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> Dict[str, Any]: """Upload data to a given URL The data will be uploaded as a block blob. @@ -82,10 +89,17 @@ def upload_blob_to_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob_to_url will overwrite any existing data. 
If set to False, the @@ -112,21 +126,26 @@ def upload_blob_to_url( :rtype: dict(str, Any) """ with BlobClient.from_blob_url(blob_url, credential=credential) as client: - return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs) + return cast(BlobClient, client).upload_blob(data=data, blob_type=BlobType.BLOCKBLOB, **kwargs) -def _download_to_stream(client, handle, **kwargs): - """Download data to specified open file-handle.""" +def _download_to_stream(client: BlobClient, handle: IO[bytes], **kwargs: Any) -> None: + """ + Download data to specified open file-handle. + + :param BlobClient client: The BlobClient to download with. + :param Stream handle: A Stream to download the data into. + """ stream = client.download_blob(**kwargs) stream.readinto(handle) def download_blob_from_url( - blob_url, # type: str - output, # type: str - credential=None, # type: Any - **kwargs): - # type: (...) -> None + blob_url: str, + output: Union[str, IO[bytes]], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> None: """Download the contents of a blob to a local file or stream. :param str blob_url: @@ -138,10 +157,17 @@ def download_blob_from_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token or the blob is public. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the local file should be overwritten if it already exists. The default value is `False` - in which case a ValueError will be raised if the file already exists. 
If set to @@ -169,10 +195,10 @@ def download_blob_from_url( overwrite = kwargs.pop('overwrite', False) with BlobClient.from_blob_url(blob_url, credential=credential) as client: if hasattr(output, 'write'): - _download_to_stream(client, output, **kwargs) + _download_to_stream(client, cast(IO[bytes], output), **kwargs) else: if not overwrite and os.path.isfile(output): - raise ValueError("The file '{}' already exists.".format(output)) + raise ValueError(f"The file '{output}' already exists.") with open(output, 'wb') as file_handle: _download_to_stream(client, file_handle, **kwargs) @@ -194,6 +220,8 @@ def download_blob_from_url( 'StandardBlobTier', 'PremiumPageBlobTier', 'SequenceNumberAction', + 'BlobImmutabilityPolicyMode', + 'ImmutabilityPolicy', 'PublicAccess', 'BlobAnalyticsLogging', 'Metrics', @@ -210,6 +238,7 @@ def download_blob_from_url( 'BlobBlock', 'PageRange', 'AccessPolicy', + 'QuickQueryDialect', 'ContainerSasPermissions', 'BlobSasPermissions', 'ResourceTypes', @@ -229,5 +258,6 @@ def download_blob_from_url( 'ArrowType', 'BlobQueryReader', 'ObjectReplicationPolicy', - 'ObjectReplicationRule' + 'ObjectReplicationRule', + 'Services', ] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client.py index f3d2d16564a3..90049ff88e32 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client.py @@ -3,78 +3,99 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
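(A short sketch of the two module-level helpers refreshed above; the SAS URL is a placeholder and must grant the needed read/write permissions.)

```python
from azure.storage.blob import download_blob_from_url, upload_blob_to_url

# Placeholder SAS URL.
sas_url = "https://<account>.blob.core.windows.net/mycontainer/myblob?<sas-token>"

with open("payload.bin", "rb") as data:
    # Uploads as a block blob; overwrite=True replaces any existing data.
    upload_blob_to_url(sas_url, data, overwrite=True)

# Raises ValueError if the local file already exists, unless overwrite=True.
download_blob_from_url(sas_url, "payload-copy.bin", overwrite=True)
```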
# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines,no-self-use +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only + +import warnings +from datetime import datetime from functools import partial -from io import BytesIO -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, +from typing import ( + Any, AnyStr, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, TYPE_CHECKING ) -try: - from urllib.parse import urlparse, quote, unquote -except ImportError: - from urlparse import urlparse # type: ignore - from urllib2 import quote, unquote # type: ignore +from typing_extensions import Self -import six +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError +from azure.core.paging import ItemPaged +from azure.core.pipeline import Pipeline from azure.core.tracing.decorator import distributed_trace -from azure.core.exceptions import ResourceNotFoundError, HttpResponseError - -from ._shared import encode_base64 -from ._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query -from ._shared.encryption import generate_blob_encryption_data -from ._shared.uploads import IterStreamer -from ._shared.request_handlers import ( - add_metadata_headers, get_length, read_length, - validate_and_format_range_headers) -from ._shared.response_handlers import return_response_headers, process_storage_error, return_headers_and_deserialized +from ._blob_client_helpers import ( + _abort_copy_options, + _append_block_from_url_options, + _append_block_options, + _clear_page_options, + _commit_block_list_options, + _create_append_blob_options, + _create_page_blob_options, + _create_snapshot_options, + _delete_blob_options, + _download_blob_options, + _format_url, + _from_blob_url, + _get_blob_tags_options, + _get_block_list_result, + _get_page_ranges_options, + _parse_url, + _quick_query_options, + _resize_blob_options, + _seal_append_blob_options, + _set_blob_metadata_options, + _set_blob_tags_options, + _set_http_headers_options, + _set_sequence_number_options, + _stage_block_from_url_options, + _stage_block_options, + _start_copy_from_url_options, + _upload_blob_from_url_options, + _upload_blob_options, + _upload_page_options, + _upload_pages_from_url_options +) +from ._deserialize import ( + deserialize_blob_properties, + deserialize_pipeline_response_into_cls, + get_page_ranges_result, + parse_tags +) +from ._download import StorageStreamDownloader +from ._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION from ._generated import AzureBlobStorage -from ._generated.models import ( # pylint: disable=unused-import - DeleteSnapshotsOptionType, - BlobHTTPHeaders, - BlockLookupList, - AppendPositionAccessConditions, - SequenceNumberAccessConditions, - QueryRequest, - CpkInfo) +from ._generated.models import CpkInfo +from ._lease import BlobLeaseClient +from ._models import BlobBlock, BlobProperties, BlobQueryError, BlobType, PageRange, PageRangePaged +from ._quick_query_helper import BlobQueryReader +from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper +from ._shared.response_handlers import process_storage_error, return_response_headers from ._serialize import ( - get_modify_conditions, - get_source_conditions, - get_cpk_scope_info, + get_access_conditions, get_api_version, - serialize_blob_tags_header, - 
serialize_blob_tags, - serialize_query_format, get_access_conditions + get_modify_conditions, + get_version_id ) -from ._deserialize import get_page_ranges_result, deserialize_blob_properties, deserialize_blob_stream, parse_tags, \ - deserialize_pipeline_response_into_cls -from ._quick_query_helper import BlobQueryReader from ._upload_helpers import ( - upload_block_blob, upload_append_blob, - upload_page_blob, _any_conditions) -from ._models import BlobType, BlobBlock, BlobProperties, BlobQueryError -from ._download import StorageStreamDownloader -from ._lease import BlobLeaseClient + upload_block_blob, + upload_page_blob +) if TYPE_CHECKING: - from datetime import datetime - from ._generated.models import BlockList - from ._models import ( # pylint: disable=unused-import + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from azure.storage.blob import ContainerClient + from ._models import ( ContentSettings, + ImmutabilityPolicy, PremiumPageBlobTier, - StandardBlobTier, - SequenceNumberAction + SequenceNumberAction, + StandardBlobTier ) -_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( - 'The require_encryption flag is set, but encryption is not supported' - ' for this method.') - -class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-methods +class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): # pylint: disable=too-many-public-methods """A client to interact with a specific blob, although that blob may not yet exist. + For more optional configuration, please click + `here `__. + :param str account_url: The URI to the storage account. In order to create a client given the full URI to the blob, use the :func:`from_blob_url` classmethod. @@ -89,13 +110,15 @@ class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-m :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -114,6 +137,11 @@ class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-m the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. 
+ :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -132,69 +160,52 @@ class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-m :caption: Creating the BlobClient from a SAS URL to a blob. """ def __init__( - self, account_url, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[Union[str, Dict[str, Any]]] - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Account URL must be a string.") - parsed_url = urlparse(account_url.rstrip('/')) - - if not (container_name and blob_name): - raise ValueError("Please specify a container name and blob name.") - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(account_url)) - - path_snapshot, sas_token = parse_query(parsed_url.query) - + self, account_url: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: + parsed_url, sas_token, path_snapshot = _parse_url( + account_url=account_url, + container_name=container_name, + blob_name=blob_name) self.container_name = container_name self.blob_name = blob_name - try: - self.snapshot = snapshot.snapshot # type: ignore - except AttributeError: - try: - self.snapshot = snapshot['snapshot'] # type: ignore - except TypeError: - self.snapshot = snapshot or path_snapshot + if snapshot is not None and hasattr(snapshot, 'snapshot'): + self.snapshot = snapshot.snapshot + elif isinstance(snapshot, dict): + self.snapshot = snapshot['snapshot'] + else: + self.snapshot = snapshot or path_snapshot + self.version_id = kwargs.pop('version_id', None) + + # This parameter is used for the hierarchy traversal. Give precedence to credential. 
+ self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - - def _format_url(self, hostname): - container_name = self.container_name - if isinstance(container_name, six.text_type): - container_name = container_name.encode('UTF-8') - return "{}://{}/{}/{}{}".format( - self.scheme, - hostname, - quote(container_name), - quote(self.blob_name, safe='~/'), - self._query_str) - - def _encode_source_url(self, source_url): - parsed_source_url = urlparse(source_url) - source_scheme = parsed_source_url.scheme - source_hostname = parsed_source_url.netloc.rstrip('/') - source_path = unquote(parsed_source_url.path) - source_query = parsed_source_url.query - result = ["{}://{}{}".format(source_scheme, source_hostname, quote(source_path, safe='~/'))] - if source_query: - result.append(source_query) - return '?'.join(result) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) + + def _format_url(self, hostname: str) -> str: + return _format_url( + container_name=self.container_name, + scheme=self.scheme, + blob_name=self.blob_name, + query_str=self._query_str, + hostname=hostname + ) @classmethod - def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs): - # type: (str, Optional[Any], Optional[Union[str, Dict[str, Any]]], Any) -> BlobClient + def from_blob_url( + cls, blob_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> Self: """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. :param str blob_url: @@ -205,58 +216,30 @@ def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs): The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. 
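To make the credential contract above concrete, a minimal construction sketch follows. It is illustrative only: the account name, key, and URLs are placeholders, and the imports target the public azure.storage.blob surface rather than this vendored copy.

    from azure.core.credentials import AzureNamedKeyCredential
    from azure.storage.blob import BlobClient

    # With AzureNamedKeyCredential, "name" is the storage account name
    # and "key" is the storage account key.
    named_key = AzureNamedKeyCredential("myaccount", "<account-key>")
    blob = BlobClient(
        account_url="https://myaccount.blob.core.windows.net",
        container_name="mycontainer",
        blob_name="myblob",
        credential=named_key,
    )

    # A SAS-bearing URL needs no separate credential; passing an
    # AzureSasCredential on top of it raises ValueError.
    sas_blob = BlobClient.from_blob_url(
        "https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas-token>")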
+ :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :param str snapshot: The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`create_snapshot`. If specified, this will override the snapshot in the url. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient """ - try: - if not blob_url.lower().startswith('http'): - blob_url = "https://" + blob_url - except AttributeError: - raise ValueError("Blob URL must be a string.") - parsed_url = urlparse(blob_url.rstrip('/')) - - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(blob_url)) - - account_path = "" - if ".core." in parsed_url.netloc: - # .core. is indicating non-customized url. Blob name with directory info can also be parsed. - path_blob = parsed_url.path.lstrip('/').split('/', 1) - elif "localhost" in parsed_url.netloc or "127.0.0.1" in parsed_url.netloc: - path_blob = parsed_url.path.lstrip('/').split('/', 2) - account_path += '/' + path_blob[0] - else: - # for customized url. blob name that has directory info cannot be parsed. - path_blob = parsed_url.path.lstrip('/').split('/') - if len(path_blob) > 2: - account_path = "/" + "/".join(path_blob[:-2]) - account_url = "{}://{}{}?{}".format( - parsed_url.scheme, - parsed_url.netloc.rstrip('/'), - account_path, - parsed_url.query) - container_name, blob_name = unquote(path_blob[-2]), unquote(path_blob[-1]) - if not container_name or not blob_name: - raise ValueError("Invalid URL. Provide a blob_url with a valid blob and container name.") - - path_snapshot, _ = parse_query(parsed_url.query) - if snapshot: - try: - path_snapshot = snapshot.snapshot # type: ignore - except AttributeError: - try: - path_snapshot = snapshot['snapshot'] # type: ignore - except TypeError: - path_snapshot = snapshot - + account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot) return cls( account_url, container_name=container_name, blob_name=blob_name, snapshot=path_snapshot, credential=credential, **kwargs @@ -264,13 +247,13 @@ def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs): @classmethod def from_connection_string( - cls, conn_str, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[str] - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): # type: (...) -> BlobClient + cls, conn_str: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create BlobClient from a Connection String. :param str conn_str: @@ -286,9 +269,21 @@ def from_connection_string( The credentials with which to authenticate. 
This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A Blob client. :rtype: ~azure.storage.blob.BlobClient @@ -310,8 +305,7 @@ def from_connection_string( ) @distributed_trace - def get_account_information(self, **kwargs): - # type: (**Any) -> Dict[str, str] + def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account in which the blob resides. The information can also be retrieved if the user has a SAS to a container or blob. @@ -321,141 +315,17 @@ def get_account_information(self, **kwargs): :rtype: dict(str, str) """ try: - return self._client.blob.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + return cast(Dict[str, str], self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) except HttpResponseError as error: process_storage_error(error) - def _upload_blob_options( # pylint:disable=too-many-statements - self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption and not self.key_encryption_key: - raise ValueError("Encryption required but no key was provided.") - encryption_options = { - 'required': self.require_encryption, - 'key': self.key_encryption_key, - 'resolver': self.key_resolver_function, - } - if self.key_encryption_key is not None: - cek, iv, encryption_data = generate_blob_encryption_data(self.key_encryption_key) - encryption_options['cek'] = cek - encryption_options['vector'] = iv - encryption_options['data'] = encryption_data - - encoding = kwargs.pop('encoding', 'UTF-8') - if isinstance(data, six.text_type): - data = data.encode(encoding) # type: ignore - if length is None: - length = get_length(data) - if isinstance(data, bytes): - data = data[:length] - - if isinstance(data, bytes): - stream = BytesIO(data) - elif hasattr(data, 'read'): - stream = data - elif hasattr(data, '__iter__'): - stream = IterStreamer(data, encoding=encoding) - else: - raise TypeError("Unsupported data type: {}".format(type(data))) - - validate_content = kwargs.pop('validate_content', False) - content_settings = kwargs.pop('content_settings', None) - overwrite = kwargs.pop('overwrite', False) - max_concurrency = kwargs.pop('max_concurrency', 1) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - kwargs['cpk_info'] = cpk_info - - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) - kwargs['modified_access_conditions'] = get_modify_conditions(kwargs) - kwargs['cpk_scope_info'] = get_cpk_scope_info(kwargs) - if content_settings: - kwargs['blob_headers'] = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - kwargs['blob_tags_string'] = serialize_blob_tags_header(kwargs.pop('tags', None)) - kwargs['stream'] = stream - kwargs['length'] = length - kwargs['overwrite'] = overwrite - kwargs['headers'] = headers - kwargs['validate_content'] = validate_content - kwargs['blob_settings'] = self._config - kwargs['max_concurrency'] = max_concurrency - kwargs['encryption_options'] = encryption_options - if blob_type == BlobType.BlockBlob: - kwargs['client'] = self._client.block_blob - kwargs['data'] = data - elif blob_type == BlobType.PageBlob: - kwargs['client'] = self._client.page_blob - elif blob_type == BlobType.AppendBlob: - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - kwargs['client'] = self._client.append_blob - else: - raise ValueError("Unsupported BlobType: {}".format(blob_type)) - return kwargs - - def _upload_blob_from_url_options(self, source_url, **kwargs): - # type: (...) 
-> Dict[str, Any] - tier = kwargs.pop('standard_blob_tier', None) - overwrite = kwargs.pop('overwrite', False) - content_settings = kwargs.pop('content_settings', None) - if content_settings: - kwargs['blob_http_headers'] = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'content_length': 0, - 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), - 'source_content_md5': kwargs.pop('source_content_md5', None), - 'copy_source': source_url, - 'modified_access_conditions': get_modify_conditions(kwargs), - 'blob_tags_string': serialize_blob_tags_header(kwargs.pop('tags', None)), - 'cls': return_response_headers, - 'lease_access_conditions': get_access_conditions(kwargs.pop('destination_lease', None)), - 'tier': tier.value if tier else None, - 'source_modified_access_conditions': get_source_conditions(kwargs), - 'cpk_info': cpk_info, - 'cpk_scope_info': get_cpk_scope_info(kwargs) - } - options.update(kwargs) - if not overwrite and not _any_conditions(**options): # pylint: disable=protected-access - options['modified_access_conditions'].if_none_match = '*' - return options - @distributed_trace - def upload_blob_from_url(self, source_url, **kwargs): - # type: (str, Any) -> Dict[str, Any] + def upload_blob_from_url( + self, source_url: str, + *, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """ Creates a new Block Blob where the content of the blob is read from a given URL. The content of an existing blob is overwritten with the new blob. @@ -463,15 +333,17 @@ def upload_blob_from_url(self, source_url, **kwargs): :param str source_url: A URL of up to 2 KB in length that specifies a file or blob. The value should be URL-encoded as it would appear in a request URI. - If the source is in another account, the source must either be public - or must be authenticated via a shared access signature. If the source - is public, no authentication is required. + The source must either be public or must be authenticated via a shared + access signature as part of the url or using the source_authorization keyword. + If the source is public, no authentication is required. Examples: https://myaccount.blob.core.windows.net/mycontainer/myblob https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :keyword dict(str, str) metadata: + Name-value pairs associated with the blob as metadata. :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. @@ -482,7 +354,7 @@ def upload_blob_from_url(self, source_url, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) :paramtype tags: dict(str, str) :keyword bytearray source_content_md5: Specify the md5 that is used to verify the integrity of the source bytes. @@ -526,7 +398,11 @@ def upload_blob_from_url(self, source_url, **kwargs): valid, the operation fails with status code 412 (Precondition Failed). :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword ~azure.storage.blob.ContentSettings content_settings: ContentSettings object used to set blob properties. Used to set content type, encoding, language, disposition, md5, and cache control. @@ -543,27 +419,35 @@ def upload_blob_from_url(self, source_url, **kwargs): :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Blob-updated property Dict (Etag and last modified) + :rtype: Dict[str, Any] """ - options = self._upload_blob_from_url_options( - source_url=self._encode_source_url(source_url), + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_from_url_options( + source_url=source_url, + metadata=metadata, **kwargs) try: - return self._client.block_blob.put_blob_from_url(**options) + return cast(Dict[str, Any], self._client.block_blob.put_blob_from_url(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def upload_blob( # pylint: disable=too-many-locals - self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Any + def upload_blob( + self, data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], + blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, + length: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new blob from a data source with automatic chunking. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -577,7 +461,7 @@ def upload_blob( # pylint: disable=too-many-locals The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -635,6 +519,20 @@ def upload_blob( # pylint: disable=too-many-locals :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword int maxsize_condition: Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob @@ -642,8 +540,9 @@ def upload_blob( # pylint: disable=too-many-locals value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). :keyword int max_concurrency: - Maximum number of parallel connections to use when the blob size exceeds - 64MB. + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: Encrypts the data on the service-side with the given key. Use of customer-provided keys must be done over HTTPS. @@ -659,12 +558,20 @@ def upload_blob( # pylint: disable=too-many-locals :keyword str encoding: Defaults to UTF-8. + :keyword progress_hook: + A callback to track the progress of a long running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], None] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. - :returns: Blob-updated property dict (Etag and last modified) - :rtype: dict[str, Any] + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. + :returns: Blob-updated property Dict (Etag and last modified) + :rtype: Dict[str, Any] .. admonition:: Example: @@ -675,11 +582,24 @@ def upload_blob( # pylint: disable=too-many-locals :dedent: 12 :caption: Upload a blob to the container. 
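The upload keywords documented above compose as in this hedged sketch; the connection string, names, payload, and tags are placeholders, and the import again targets the public package rather than the vendored module.

    from azure.storage.blob import BlobClient

    def on_progress(current, total):
        # progress_hook contract: bytes transferred so far, total size or None.
        print(f"uploaded {current} of {total} bytes")

    blob = BlobClient.from_connection_string(
        "<connection-string>", container_name="mycontainer", blob_name="myblob")
    blob.upload_blob(
        b"hello, world",
        overwrite=True,
        tags={"project": "demo"},
        progress_hook=on_progress,
    )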
""" - options = self._upload_blob_options( - data, + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_options( + data=data, blob_type=blob_type, length=length, metadata=metadata, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) if blob_type == BlobType.BlockBlob: return upload_block_blob(**options) @@ -687,56 +607,37 @@ def upload_blob( # pylint: disable=too-many-locals return upload_page_blob(**options) return upload_append_blob(**options) - def _download_blob_options(self, offset=None, length=None, **kwargs): - # type: (Optional[int], Optional[int], **Any) -> Dict[str, Any] - if self.require_encryption and not self.key_encryption_key: - raise ValueError("Encryption required but no key was provided.") - if length is not None and offset is None: - raise ValueError("Offset value must not be None if length is set.") - if length is not None: - length = offset + length - 1 # Service actually uses an end-range inclusive index - - validate_content = kwargs.pop('validate_content', False) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'clients': self._client, - 'config': self._config, - 'start_range': offset, - 'end_range': length, - 'version_id': kwargs.pop('version_id', None), - 'validate_content': validate_content, - 'encryption_options': { - 'required': self.require_encryption, - 'key': self.key_encryption_key, - 'resolver': self.key_resolver_function}, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'cls': kwargs.pop('cls', None) or deserialize_blob_stream, - 'max_concurrency':kwargs.pop('max_concurrency', 1), - 'encoding': kwargs.pop('encoding', None), - 'timeout': kwargs.pop('timeout', None), - 'name': self.blob_name, - 'container': self.container_name} - options.update(kwargs) - return options + @overload + def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... @distributed_trace - def download_blob(self, offset=None, length=None, **kwargs): - # type: (Optional[int], Optional[int], **Any) -> StorageStreamDownloader + def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must be used to read all the content or readinto() must be used to download the blob into - a stream. + a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. :param int offset: Start of byte range to use for downloading a section of the blob. @@ -749,6 +650,7 @@ def download_blob(self, offset=None, length=None, **kwargs): value that, when present, specifies the version of the blob to download. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword bool validate_content: @@ -794,11 +696,23 @@ def download_blob(self, offset=None, length=None, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int max_concurrency: - The number of parallel connections with which to download. - :keyword str encoding: + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. + :keyword Optional[str] encoding: Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + A callback to track the progress of a long running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], None] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. - multiple calls to the Azure service and the timeout will apply to - each call individually. :returns: A streaming object (StorageStreamDownloader) @@ -813,87 +727,60 @@ 
:dedent: 12 :caption: Download a blob. 
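Per the overloads above, the downloader's element type follows the encoding argument. A small sketch, reusing the `blob` client from the upload sketch earlier:

    raw = blob.download_blob().readall()                   # bytes (encoding=None)
    text = blob.download_blob(encoding="UTF-8").readall()  # str

    # Stream the content in chunks instead of buffering it all at once.
    total = 0
    for chunk in blob.download_blob(max_concurrency=2).chunks():
        total += len(chunk)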
""" - options = self._download_blob_options( + if self.require_encryption and not (self.key_encryption_key or self.key_resolver_function): + raise ValueError("Encryption required but no key was provided.") + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _download_blob_options( + blob_name=self.blob_name, + container_name=self.container_name, + version_id=get_version_id(self.version_id, kwargs), offset=offset, length=length, + encoding=encoding, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) return StorageStreamDownloader(**options) - def _quick_query_options(self, query_expression, - **kwargs): - # type: (str, **Any) -> Dict[str, Any] - delimiter = '\n' - input_format = kwargs.pop('blob_format', None) - if input_format: - try: - delimiter = input_format.lineterminator - except AttributeError: - try: - delimiter = input_format.delimiter - except AttributeError: - raise ValueError("The Type of blob_format can only be DelimitedTextDialect or DelimitedJsonDialect") - output_format = kwargs.pop('output_format', None) - if output_format: - try: - delimiter = output_format.lineterminator - except AttributeError: - try: - delimiter = output_format.delimiter - except AttributeError: - pass - else: - output_format = input_format - query_request = QueryRequest( - expression=query_expression, - input_serialization=serialize_query_format(input_format), - output_serialization=serialize_query_format(output_format) - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo( - encryption_key=cpk.key_value, - encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm - ) - options = { - 'query_request': query_request, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'snapshot': self.snapshot, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_headers_and_deserialized, - } - options.update(kwargs) - return options, delimiter - @distributed_trace - def query_blob(self, query_expression, **kwargs): - # type: (str, **Any) -> BlobQueryReader + def query_blob(self, query_expression: str, **kwargs: Any) -> BlobQueryReader: """Enables users to select/project on blob/or blob snapshot data by providing simple query expressions. This operations returns a BlobQueryReader, users need to use readall() or readinto() to get query data. :param str query_expression: - Required. a query statement. + Required. a query statement. For more details see + https://learn.microsoft.com/azure/storage/blobs/query-acceleration-sql-reference. :keyword Callable[~azure.storage.blob.BlobQueryError] on_error: A function to be called on any processing errors returned by the service. :keyword blob_format: Optional. Defines the serialization of the data currently stored in the blob. 
The default is to treat the blob data as CSV data formatted in the default dialect. This can be overridden with - a custom DelimitedTextDialect, or alternatively a DelimitedJsonDialect. + a custom DelimitedTextDialect, or DelimitedJsonDialect or "ParquetDialect" (passed as a string or enum). + These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string. + + .. note:: + "ParquetDialect" is in preview, so some features may not work as intended. + :paramtype blob_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect + or ~azure.storage.blob.QuickQueryDialect or str :keyword output_format: Optional. Defines the output serialization for the data stream. By default the data will be returned - as it is represented in the blob. By providing an output format, the blob data will be reformatted - according to that profile. This value can be a DelimitedTextDialect or a DelimitedJsonDialect. - :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect, ~azure.storage.blob.DelimitedJsonDialect - or list[~azure.storage.blob.ArrowDialect] + as it is represented in the blob (Parquet formats default to DelimitedTextDialect). + By providing an output format, the blob data will be reformatted according to that profile. + This value can be a DelimitedTextDialect or a DelimitedJsonDialect or ArrowDialect. + These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string. + :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect + or List[~azure.storage.blob.ArrowDialect] or ~azure.storage.blob.QuickQueryDialect or str :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -927,7 +814,11 @@ def query_blob(self, query_expression, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. 
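A quick-query sketch under the dialect rules above; the CSV shape and the SQL expression are illustrative assumptions, and `blob` is the client from the earlier sketches:

    from azure.storage.blob import DelimitedTextDialect

    # Input: headered, comma-separated CSV. Output: semicolon-separated rows.
    input_fmt = DelimitedTextDialect(delimiter=",", quotechar='"',
                                     lineterminator="\n", has_header=True)
    output_fmt = DelimitedTextDialect(delimiter=";")
    reader = blob.query_blob(
        "SELECT _2 FROM BlobStorage WHERE _1 > 100",
        blob_format=input_fmt,
        output_format=output_fmt,
    )
    rows = reader.readall()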
:returns: A streaming object (BlobQueryReader) :rtype: ~azure.storage.blob.BlobQueryReader @@ -943,7 +834,9 @@ def query_blob(self, query_expression, **kwargs): errors = kwargs.pop("on_error", None) error_cls = kwargs.pop("error_cls", BlobQueryError) encoding = kwargs.pop("encoding", None) - options, delimiter = self._quick_query_options(query_expression, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options, delimiter = _quick_query_options(self.snapshot, query_expression, **kwargs) try: headers, raw_response_body = self._client.blob.query(**options) except HttpResponseError as error: @@ -958,35 +851,8 @@ def query_blob(self, query_expression, **kwargs): response=raw_response_body, error_cls=error_cls) - @staticmethod - def _generic_delete_blob_options(delete_snapshots=False, **kwargs): - # type: (bool, **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if delete_snapshots: - delete_snapshots = DeleteSnapshotsOptionType(delete_snapshots) - options = { - 'timeout': kwargs.pop('timeout', None), - 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs - 'delete_snapshots': delete_snapshots or None, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions} - options.update(kwargs) - return options - - def _delete_blob_options(self, delete_snapshots=False, **kwargs): - # type: (bool, **Any) -> Dict[str, Any] - if self.snapshot and delete_snapshots: - raise ValueError("The delete_snapshots option cannot be used with a specific snapshot.") - options = self._generic_delete_blob_options(delete_snapshots, **kwargs) - options['snapshot'] = self.snapshot - options['version_id'] = kwargs.pop('version_id', None) - options['blob_delete_type'] = kwargs.pop('blob_delete_type', None) - return options - @distributed_trace - def delete_blob(self, delete_snapshots=False, **kwargs): - # type: (str, **Any) -> None + def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None: """Marks the specified blob for deletion. The blob is later deleted during garbage collection. @@ -1000,15 +866,16 @@ def delete_blob(self, delete_snapshots=False, **kwargs): A soft-deleted blob is accessible through :func:`~ContainerClient.list_blobs()` by specifying the `include=['deleted']` option. A soft-deleted blob can be restored using the :func:`undelete` operation. - :param str delete_snapshots: + :param Optional[str] delete_snapshots: Required if the blob has associated snapshots. Values include: - "only": Deletes only the blob's snapshots. - "include": Deletes the blob along with all snapshots. - :keyword str version_id: + :keyword Optional[str] version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -1040,7 +907,11 @@ def delete_blob(self, delete_snapshots=False, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. 
admonition:: Example: @@ -1052,22 +923,33 @@ def delete_blob(self, delete_snapshots=False, **kwargs): :dedent: 12 :caption: Delete a blob. """ - options = self._delete_blob_options(delete_snapshots=delete_snapshots, **kwargs) + options = _delete_blob_options( + snapshot=self.snapshot, + version_id=get_version_id(self.version_id, kwargs), + delete_snapshots=delete_snapshots, + **kwargs) try: self._client.blob.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def undelete_blob(self, **kwargs): - # type: (**Any) -> None + def undelete_blob(self, **kwargs: Any) -> None: """Restores soft-deleted blobs or snapshots. Operation will only be successful if used within the specified number of days set in the delete retention policy. + If blob versioning is enabled, the base blob cannot be restored using this + method. Instead use :func:`start_copy_from_url` with the URL of the blob version + you wish to promote to the current version. + :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -1084,25 +966,34 @@ def undelete_blob(self, **kwargs): except HttpResponseError as error: process_storage_error(error) - @distributed_trace() - def exists(self, **kwargs): - # type: (**Any) -> bool + @distributed_trace + def exists(self, **kwargs: Any) -> bool: """ Returns True if a blob exists with the defined parameters, and returns False otherwise. - :param str version_id: + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to check if it exists. - :param int timeout: - The timeout parameter is expressed in seconds. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: boolean + :rtype: bool """ + version_id = get_version_id(self.version_id, kwargs) try: self._client.blob.get_properties( snapshot=self.snapshot, + version_id=version_id, **kwargs) return True + # Encrypted with CPK + except ResourceExistsError: + return True except HttpResponseError as error: try: process_storage_error(error) @@ -1110,8 +1001,7 @@ def exists(self, **kwargs): return False @distributed_trace - def get_blob_properties(self, **kwargs): - # type: (**Any) -> BlobProperties + def get_blob_properties(self, **kwargs: Any) -> BlobProperties: """Returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -1124,6 +1014,7 @@ def get_blob_properties(self, **kwargs): value that, when present, specifies the version of the blob to get properties. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword ~datetime.datetime if_modified_since: @@ -1155,7 +1046,11 @@ def get_blob_properties(self, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. 
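Taken together, delete_blob, undelete_blob, and exists support a soft-delete round trip. A sketch, assuming delete retention is enabled on the account and `blob` is the client from the earlier sketches:

    if blob.exists():
        blob.delete_blob(delete_snapshots="include")
    assert not blob.exists()

    # Within the retention window the blob can be restored.
    blob.undelete_blob()
    assert blob.exists()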
:keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: BlobProperties :rtype: ~azure.storage.blob.BlobProperties @@ -1171,6 +1066,7 @@ def get_blob_properties(self, **kwargs): # TODO: extract this out as _get_blob_properties_options access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: @@ -1182,49 +1078,25 @@ def get_blob_properties(self, **kwargs): cls_method = kwargs.pop('cls', None) if cls_method: kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) - blob_props = self._client.blob.get_properties( + blob_props = cast(BlobProperties, self._client.blob.get_properties( timeout=kwargs.pop('timeout', None), - version_id=kwargs.pop('version_id', None), + version_id=version_id, snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=kwargs.pop('cls', None) or deserialize_blob_properties, cpk_info=cpk_info, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) blob_props.name = self.blob_name if isinstance(blob_props, BlobProperties): blob_props.container = self.container_name blob_props.snapshot = self.snapshot - return blob_props # type: ignore - - def _set_http_headers_options(self, content_settings=None, **kwargs): - # type: (Optional[ContentSettings], **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - options = { - 'timeout': kwargs.pop('timeout', None), - 'blob_http_headers': blob_headers, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options + return blob_props @distributed_trace - def set_http_headers(self, content_settings=None, **kwargs): - # type: (Optional[ContentSettings], **Any) -> None + def set_http_headers(self, content_settings: Optional["ContentSettings"] = None, **kwargs: Any) -> Dict[str, Any]: """Sets system properties on the blob. If one property is set for the content_settings, all properties will be overridden. @@ -1260,45 +1132,25 @@ def set_http_headers(self, content_settings=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
:returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = self._set_http_headers_options(content_settings=content_settings, **kwargs) + options = _set_http_headers_options(content_settings=content_settings, **kwargs) try: - return self._client.blob.set_http_headers(**options) # type: ignore + return cast(Dict[str, Any], self._client.blob.set_http_headers(**options)) except HttpResponseError as error: process_storage_error(error) - def _set_blob_metadata_options(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - @distributed_trace - def set_blob_metadata(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + def set_blob_metadata( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets user-defined metadata for the blob as one or more name-value pairs. :param metadata: @@ -1347,83 +1199,114 @@ def set_blob_metadata(self, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Union[str, datetime]] """ - options = self._set_blob_metadata_options(metadata=metadata, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _set_blob_metadata_options(metadata=metadata, **kwargs) try: - return self._client.blob.set_metadata(**options) # type: ignore + return cast(Dict[str, Union[str, datetime]], self._client.blob.set_metadata(**options)) except HttpResponseError as error: process_storage_error(error) - def _create_page_blob_options( # type: ignore - self, size, # type: int - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) + @distributed_trace + def set_immutability_policy( + self, immutability_policy: "ImmutabilityPolicy", + **kwargs: Any + ) -> Dict[str, str]: + """The Set Immutability Policy operation sets the immutability policy on the blob. - sequence_number = kwargs.pop('sequence_number', None) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. - if premium_page_blob_tier: - try: - headers['x-ms-access-tier'] = premium_page_blob_tier.value # type: ignore - except AttributeError: - headers['x-ms-access-tier'] = premium_page_blob_tier # type: ignore - - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'content_length': 0, - 'blob_content_length': size, - 'blob_sequence_number': sequence_number, - 'blob_http_headers': blob_headers, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'blob_tags_string': blob_tags_string, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options + :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to operate on. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: Blob-updated property dict (immutability policy expiry and mode). 
+ :rtype: Dict[str, str] + """ + + version_id = get_version_id(self.version_id, kwargs) + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + return cast(Dict[str, str], self._client.blob.set_immutability_policy( + cls=return_response_headers, version_id=version_id, **kwargs)) @distributed_trace - def create_page_blob( # type: ignore - self, size, # type: int - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def delete_immutability_policy(self, **kwargs: Any) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to operate on. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :rtype: None + """ + + version_id = get_version_id(self.version_id, kwargs) + self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs) + + @distributed_trace + def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: + """The Set Legal Hold operation sets a legal hold on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param bool legal_hold: + Specified if a legal hold should be set on the blob. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to operate on. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: Blob-updated property dict (Etag, last modified, legal hold). + :rtype: Dict[str, Union[str, datetime, bool]] + """ + + version_id = get_version_id(self.version_id, kwargs) + return cast(Dict[str, Union[str, datetime, bool]], self._client.blob.set_legal_hold( + legal_hold, version_id=version_id, cls=return_response_headers, **kwargs)) + + @distributed_trace + def create_page_blob( + self, size: int, + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Creates a new Page Blob of the specified size. :param int size: @@ -1444,7 +1327,7 @@ def create_page_blob( # type: ignore The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -1457,6 +1340,18 @@ def create_page_blob( # type: ignore Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1488,68 +1383,38 @@ def create_page_blob( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict[str, Any] """ - options = self._create_page_blob_options( - size, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_page_blob_options( + size=size, content_settings=content_settings, metadata=metadata, premium_page_blob_tier=premium_page_blob_tier, **kwargs) try: - return self._client.page_blob.create(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.create(**options)) except HttpResponseError as error: process_storage_error(error) - def _create_append_blob_options(self, content_settings=None, metadata=None, **kwargs): - # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - blob_headers = None - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key 
must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'content_length': 0, - 'blob_http_headers': blob_headers, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'blob_tags_string': blob_tags_string, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - @distributed_trace - def create_append_blob(self, content_settings=None, metadata=None, **kwargs): - # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] - """Creates a new Append Blob. + def create_append_blob( + self, content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: + """Creates a new Append Blob. This operation creates a new 0-length append blob. The content + of any existing blob is overwritten with the newly initialized append blob. To add content to + the append blob, call the :func:`append_block` or :func:`append_block_from_url` method. :param ~azure.storage.blob.ContentSettings content_settings: ContentSettings object used to set blob properties. Used to set content type, encoding, @@ -1562,7 +1427,7 @@ def create_append_blob(self, content_settings=None, metadata=None, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -1571,6 +1436,18 @@ def create_append_blob(self, content_settings=None, metadata=None, **kwargs): Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1602,48 +1479,32 @@ def create_append_blob(self, content_settings=None, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
:rtype: dict[str, Any] """ - options = self._create_append_blob_options( + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_append_blob_options( content_settings=content_settings, metadata=metadata, **kwargs) try: - return self._client.append_blob.create(**options) # type: ignore + return cast(Dict[str, Union[str, datetime]], self._client.append_blob.create(**options)) except HttpResponseError as error: process_storage_error(error) - def _create_snapshot_options(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - 'headers': headers} - options.update(kwargs) - return options - @distributed_trace - def create_snapshot(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + def create_snapshot( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Creates a snapshot of the blob. A snapshot is a read-only version of a blob that's taken at a point in time. @@ -1697,7 +1558,11 @@ def create_snapshot(self, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). :rtype: dict[str, Any] @@ -1710,58 +1575,29 @@ def create_snapshot(self, metadata=None, **kwargs): :dedent: 8 :caption: Create a snapshot of the blob. 
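# [Illustrative sketch, editor's addition.] Taking a snapshot and reading it
# back through a snapshot-scoped client, assuming the public azure-storage-blob
# surface; all names are placeholders.
from azure.storage.blob import BlobServiceClient

service = BlobServiceClient.from_connection_string("<connection-string>")
blob = service.get_blob_client("container", "data.txt")
snap = blob.create_snapshot(metadata={"reason": "backup"})
# The returned dict includes the snapshot ID used to address the snapshot.
snap_client = service.get_blob_client("container", "data.txt", snapshot=snap["snapshot"])
content = snap_client.download_blob().readall()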
""" - options = self._create_snapshot_options(metadata=metadata, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _create_snapshot_options(metadata=metadata, **kwargs) try: - return self._client.blob.create_snapshot(**options) # type: ignore + return cast(Dict[str, Any], self._client.blob.create_snapshot(**options)) except HttpResponseError as error: process_storage_error(error) - def _start_copy_from_url_options(self, source_url, metadata=None, incremental_copy=False, **kwargs): - # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Any] - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - if 'source_lease' in kwargs: - source_lease = kwargs.pop('source_lease') - try: - headers['x-ms-source-lease-id'] = source_lease.id # type: str - except AttributeError: - headers['x-ms-source-lease-id'] = source_lease - - tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None) - - if kwargs.get('requires_sync'): - headers['x-ms-requires-sync'] = str(kwargs.pop('requires_sync')) - - timeout = kwargs.pop('timeout', None) - dest_mod_conditions = get_modify_conditions(kwargs) - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'copy_source': source_url, - 'seal_blob': kwargs.pop('seal_destination_blob', None), - 'timeout': timeout, - 'modified_access_conditions': dest_mod_conditions, - 'blob_tags_string': blob_tags_string, - 'headers': headers, - 'cls': return_response_headers, - } - if not incremental_copy: - source_mod_conditions = get_source_conditions(kwargs) - dest_access_conditions = get_access_conditions(kwargs.pop('destination_lease', None)) - options['source_modified_access_conditions'] = source_mod_conditions - options['lease_access_conditions'] = dest_access_conditions - options['tier'] = tier.value if tier else None - options.update(kwargs) - return options - @distributed_trace - def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs): - # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Union[str, datetime]] - """Copies a blob asynchronously. - - This operation returns a copy operation - object that can be used to wait on the completion of the operation, - as well as check status or abort the copy operation. + def start_copy_from_url( + self, source_url: str, + metadata: Optional[Dict[str, str]] = None, + incremental_copy: bool = False, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: + """Copies a blob from the given URL. + + This operation returns a dictionary containing `copy_status` and `copy_id`, + which can be used to check the status of or abort the copy operation. + `copy_status` will be 'success' if the copy completed synchronously or + 'pending' if the copy has been started asynchronously. For asynchronous copies, + the status can be checked by polling the :func:`get_blob_properties` method and + checking the copy status. Set `requires_sync` to True to force the copy to be synchronous. The Blob service copies blobs on a best-effort basis. The source blob for a copy operation may be a block blob, an append blob, @@ -1784,10 +1620,6 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, end of the copy operation, the destination blob will have the same committed block count as the source. 
- For all blob types, you can call status() on the returned polling object - to check the status of the copy operation, or wait() to block until the - operation is complete. The final blob will be committed when the copy completes. - :param str source_url: A URL of up to 2 KB in length that specifies a file or blob. The value should be URL-encoded as it would appear in a request URI. @@ -1818,11 +1650,26 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_). + + The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob. + This option is only available when `incremental_copy=False` and `requires_sync=True`. .. versionadded:: 12.4.0 - :paramtype tags: dict(str, str) + :paramtype tags: dict(str, str) or Literal["COPY"] + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime source_if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -1871,7 +1718,11 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, the lease ID given matches the active lease ID of the source blob. :paramtype source_lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1888,8 +1739,23 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, :keyword bool requires_sync: Enforces that the service will not return a response until the copy is complete. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. This option is only available when `incremental_copy` is + set to False and `requires_sync` is set to True. + + .. versionadded:: 12.9.0 + + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption + scope can be created using the Management API and referenced here by name. 
If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.10.0 + :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). - :rtype: dict[str, str or ~datetime.datetime] + :rtype: dict[str, Union[str, ~datetime.datetime]] .. admonition:: Example: @@ -1900,38 +1766,23 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, :dedent: 12 :caption: Copy a blob from a URL. """ - options = self._start_copy_from_url_options( - source_url=self._encode_source_url(source_url), + options = _start_copy_from_url_options( + source_url=source_url, metadata=metadata, incremental_copy=incremental_copy, **kwargs) try: if incremental_copy: - return self._client.page_blob.copy_incremental(**options) - return self._client.blob.start_copy_from_url(**options) + return cast(Dict[str, Union[str, datetime]], self._client.page_blob.copy_incremental(**options)) + return cast(Dict[str, Union[str, datetime]], self._client.blob.start_copy_from_url(**options)) except HttpResponseError as error: process_storage_error(error) - def _abort_copy_options(self, copy_id, **kwargs): - # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - try: - copy_id = copy_id.copy.id - except AttributeError: - try: - copy_id = copy_id['copy_id'] - except TypeError: - pass - options = { - 'copy_id': copy_id, - 'lease_access_conditions': access_conditions, - 'timeout': kwargs.pop('timeout', None)} - options.update(kwargs) - return options - @distributed_trace - def abort_copy(self, copy_id, **kwargs): - # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> None + def abort_copy( + self, copy_id: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> None: """Abort an ongoing copy operation. This will leave a destination blob with zero length and full metadata. @@ -1952,15 +1803,14 @@ def abort_copy(self, copy_id, **kwargs): :dedent: 12 :caption: Abort copying a blob from URL. """ - options = self._abort_copy_options(copy_id, **kwargs) + options = _abort_copy_options(copy_id, **kwargs) try: self._client.blob.abort_copy_from_url(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): - # type: (int, Optional[str], **Any) -> BlobLeaseClient + def acquire_lease(self, lease_duration: int =-1, lease_id: Optional[str] = None, **kwargs: Any) -> BlobLeaseClient: """Requests a new lease. If the blob does not have an active lease, the Blob @@ -1999,7 +1849,11 @@ def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A BlobLeaseClient object. :rtype: ~azure.storage.blob.BlobLeaseClient @@ -2012,13 +1866,12 @@ def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): :dedent: 8 :caption: Acquiring a lease on a blob. 
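# [Illustrative sketch, editor's addition.] Acquiring a lease and passing it to
# a write operation, assuming the public azure-storage-blob surface; names are
# placeholders.
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "data.txt")
lease = blob.acquire_lease(lease_duration=15)  # 15-60 seconds, or -1 for infinite
try:
    blob.upload_blob(b"guarded write", overwrite=True, lease=lease)
finally:
    lease.release()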
""" - lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + lease = BlobLeaseClient(self, lease_id=lease_id) lease.acquire(lease_duration=lease_duration, **kwargs) return lease @distributed_trace - def set_standard_blob_tier(self, standard_blob_tier, **kwargs): - # type: (Union[str, StandardBlobTier], Any) -> None + def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: """This operation sets the tier on a block blob. A block blob's tier determines Hot/Cool/Archive storage type. @@ -2039,14 +1892,20 @@ def set_standard_blob_tier(self, standard_blob_tier, **kwargs): value that, when present, specifies the version of the blob to download. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword str if_tags_match_condition: Specify a SQL where clause on blob tags to operate only on blob with a matching value. eg. ``\"\\\"tagname\\\"='my tag'\"`` .. versionadded:: 12.4.0 + :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -2055,6 +1914,7 @@ def set_standard_blob_tier(self, standard_blob_tier, **kwargs): """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) if standard_blob_tier is None: raise ValueError("A StandardBlobTier must be specified") if self.snapshot and kwargs.get('version_id'): @@ -2066,69 +1926,25 @@ def set_standard_blob_tier(self, standard_blob_tier, **kwargs): timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, lease_access_conditions=access_conditions, + version_id=version_id, **kwargs) except HttpResponseError as error: process_storage_error(error) - def _stage_block_options( - self, block_id, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - block_id = encode_base64(str(block_id)) - if isinstance(data, six.text_type): - data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - if length is None: - length = get_length(data) - if length is None: - length, data = read_length(data) - if isinstance(data, bytes): - data = data[:length] - - validate_content = kwargs.pop('validate_content', False) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'block_id': block_id, - 'content_length': length, - 'body': data, - 'transactional_content_md5': None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - } - options.update(kwargs) - return options - @distributed_trace def stage_block( - self, block_id, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> Dict[str, Any] + self, block_id: str, + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob. :param str block_id: A string value that identifies the block. The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage @@ -2159,72 +1975,37 @@ def stage_block( .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob property dict. 
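# [Illustrative sketch, editor's addition.] Staging a block as described above;
# block IDs must be equal-length strings for a given blob, and the client
# base64-encodes them. Assumes the public azure-storage-blob surface; names are
# placeholders.
import uuid

from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "big.bin")
block_id = uuid.uuid4().hex                    # 32-char hex keeps IDs equal length
blob.stage_block(block_id=block_id, data=b"chunk-1 bytes")
# The block stays uncommitted until commit_block_list includes its ID.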
:rtype: dict[str, Any] """ - options = self._stage_block_options( - block_id, - data, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _stage_block_options( + block_id=block_id, + data=data, length=length, **kwargs) try: - return self._client.block_blob.stage_block(**options) + return cast(Dict[str, Any], self._client.block_blob.stage_block(**options)) except HttpResponseError as error: process_storage_error(error) - def _stage_block_from_url_options( - self, block_id, # type: str - source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - source_content_md5=None, # type: Optional[Union[bytes, bytearray]] - **kwargs - ): - # type: (...) -> Dict[str, Any] - if source_length is not None and source_offset is None: - raise ValueError("Source offset value must not be None if length is set.") - if source_length is not None: - source_length = source_offset + source_length - 1 - block_id = encode_base64(str(block_id)) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - range_header = None - if source_offset is not None: - range_header, _ = validate_and_format_range_headers(source_offset, source_length) - - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'block_id': block_id, - 'content_length': 0, - 'source_url': source_url, - 'source_range': range_header, - 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers, - } - options.update(kwargs) - return options - @distributed_trace def stage_block_from_url( - self, block_id, # type: str - source_url, # type: str - source_offset=None, # type: Optional[int] - source_length=None, # type: Optional[int] - source_content_md5=None, # type: Optional[Union[bytes, bytearray]] - **kwargs - ): - # type: (...) -> Dict[str, Any] + self, block_id: str, + source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + source_content_md5: Optional[Union[bytes, bytearray]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob where the contents are read from a URL. @@ -2257,35 +2038,36 @@ def stage_block_from_url( .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. :returns: Blob property dict. 
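# [Illustrative sketch, editor's addition.] Staging a block whose content the
# service reads from a source URL (here a 4 MiB range of another blob). Assumes
# the public azure-storage-blob surface; the SAS-style URL is a placeholder.
import uuid

from azure.storage.blob import BlobClient

dest = BlobClient.from_connection_string("<connection-string>", "container", "assembled.bin")
dest.stage_block_from_url(
    block_id=uuid.uuid4().hex,
    source_url="https://account.blob.core.windows.net/src/source.bin?<sas>",
    source_offset=0,
    source_length=4 * 1024 * 1024)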
:rtype: dict[str, Any] """ - options = self._stage_block_from_url_options( - block_id, - source_url=self._encode_source_url(source_url), + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _stage_block_from_url_options( + block_id=block_id, + source_url=source_url, source_offset=source_offset, source_length=source_length, source_content_md5=source_content_md5, **kwargs) try: - return self._client.block_blob.stage_block_from_url(**options) + return cast(Dict[str, Any], self._client.block_blob.stage_block_from_url(**options)) except HttpResponseError as error: process_storage_error(error) - def _get_block_list_result(self, blocks): - # type: (BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]] - committed = [] # type: List - uncommitted = [] # type: List - if blocks.committed_blocks: - committed = [BlobBlock._from_generated(b) for b in blocks.committed_blocks] # pylint: disable=protected-access - if blocks.uncommitted_blocks: - uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access - return committed, uncommitted - @distributed_trace - def get_block_list(self, block_list_type="committed", **kwargs): - # type: (Optional[str], **Any) -> Tuple[List[BlobBlock], List[BlobBlock]] + def get_block_list( + self, block_list_type: str = "committed", + **kwargs: Any + ) -> Tuple[List[BlobBlock], List[BlobBlock]]: """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. @@ -2303,9 +2085,13 @@ def get_block_list(self, block_list_type="committed", **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A tuple of two lists - committed and uncommitted blocks - :rtype: tuple(list(~azure.storage.blob.BlobBlock), list(~azure.storage.blob.BlobBlock)) + :rtype: Tuple[List[BlobBlock], List[BlobBlock]] """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) @@ -2319,81 +2105,15 @@ def get_block_list(self, block_list_type="committed", **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - return self._get_block_list_result(blocks) - - def _commit_block_list_options( # type: ignore - self, block_list, # type: List[BlobBlock] - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) - for block in block_list: - try: - if block.state.value == 'committed': - block_lookup.committed.append(encode_base64(str(block.id))) - elif block.state.value == 'uncommitted': - block_lookup.uncommitted.append(encode_base64(str(block.id))) - else: - block_lookup.latest.append(encode_base64(str(block.id))) - except AttributeError: - block_lookup.latest.append(encode_base64(str(block))) - headers = kwargs.pop('headers', {}) - headers.update(add_metadata_headers(metadata)) - blob_headers = None - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if content_settings: - blob_headers = BlobHTTPHeaders( - blob_cache_control=content_settings.cache_control, - blob_content_type=content_settings.content_type, - blob_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, - blob_content_encoding=content_settings.content_encoding, - blob_content_language=content_settings.content_language, - blob_content_disposition=content_settings.content_disposition - ) - - validate_content = kwargs.pop('validate_content', False) - cpk_scope_info = get_cpk_scope_info(kwargs) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - tier = kwargs.pop('standard_blob_tier', None) - blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) - - options = { - 'blocks': block_lookup, - 'blob_http_headers': blob_headers, - 'lease_access_conditions': access_conditions, - 'timeout': kwargs.pop('timeout', None), - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'tier': tier.value if tier else None, - 'blob_tags_string': blob_tags_string, - 'headers': headers - } - options.update(kwargs) - return options + return _get_block_list_result(blocks) @distributed_trace - def commit_block_list( # type: ignore - self, block_list, # type: List[BlobBlock] - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def commit_block_list( + self, block_list: List[BlobBlock], + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. @@ -2410,7 +2130,7 @@ def commit_block_list( # type: ignore The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. 
versionadded:: 12.4.0 @@ -2419,6 +2139,18 @@ def commit_block_list( # type: ignore Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword bool validate_content: If true, calculates an MD5 hash of the page content. The storage service checks the hash of the content that has arrived @@ -2465,23 +2197,30 @@ def commit_block_list( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._commit_block_list_options( - block_list, + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _commit_block_list_options( + block_list=block_list, content_settings=content_settings, metadata=metadata, **kwargs) try: - return self._client.block_blob.commit_block_list(**options) # type: ignore + return cast(Dict[str, Any], self._client.block_blob.commit_block_list(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): - # type: (Union[str, PremiumPageBlobTier], **Any) -> None + def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None: """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. :param premium_page_blob_tier: @@ -2496,9 +2235,11 @@ def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. 
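# [Illustrative sketch, editor's addition.] Committing previously staged blocks
# and verifying the result with get_block_list, assuming the public
# azure-storage-blob surface; the block IDs are hypothetical and must match IDs
# staged earlier.
from azure.storage.blob import BlobBlock, BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "big.bin")
staged_ids = ["id-000001", "id-000002"]
blob.commit_block_list([BlobBlock(block_id=b) for b in staged_ids])
committed, uncommitted = blob.get_block_list("all")  # uncommitted should now be empty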
@@ -2519,23 +2260,8 @@ def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): except HttpResponseError as error: process_storage_error(error) - def _set_blob_tags_options(self, tags=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] - tags = serialize_blob_tags(tags) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - options = { - 'tags': tags, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def set_blob_tags(self, tags=None, **kwargs): - # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. Each call to this operation replaces all existing tags attached to the blob. To remove all tags from the blob, call this operation with no tags set. @@ -2548,7 +2274,7 @@ def set_blob_tags(self, tags=None, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) :type tags: dict(str, str) :keyword str version_id: The version id parameter is an opaque DateTime @@ -2568,39 +2294,29 @@ def set_blob_tags(self, tags=None, **kwargs): or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = self._set_blob_tags_options(tags=tags, **kwargs) + version_id = get_version_id(self.version_id, kwargs) + options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs) try: - return self._client.blob.set_tags(**options) + return cast(Dict[str, Any], self._client.blob.set_tags(**options)) except HttpResponseError as error: process_storage_error(error) - def _get_blob_tags_options(self, **kwargs): - # type: (**Any) -> Dict[str, str] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - - options = { - 'version_id': kwargs.pop('version_id', None), - 'snapshot': self.snapshot, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_headers_and_deserialized} - return options - @distributed_trace - def get_blob_tags(self, **kwargs): - # type: (**Any) -> Dict[str, str] + def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]: """The Get Tags operation enables users to get tags on a blob or specific blob version, or snapshot. .. versionadded:: 12.4.0 This operation was introduced in API version '2019-12-12'. 
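# [Illustrative sketch, editor's addition.] Replacing, reading, and clearing a
# blob's tag set, assuming the public azure-storage-blob surface; names are
# placeholders.
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "data.txt")
blob.set_blob_tags({"project": "demo", "stage": "raw"})  # replaces the whole tag set
tags = blob.get_blob_tags()                              # {'project': 'demo', 'stage': 'raw'}
blob.set_blob_tags()                                     # no tags given -> clears all tags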
- :keyword str version_id: + :keyword Optional[str] version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to add tags to. :keyword str if_tags_match_condition: @@ -2611,59 +2327,30 @@ def get_blob_tags(self, **kwargs): or the lease ID as a string. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Key value pairs of blob tags. :rtype: Dict[str, str] """ - options = self._get_blob_tags_options(**kwargs) + version_id = get_version_id(self.version_id, kwargs) + options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs) try: _, tags = self._client.blob.get_tags(**options) - return parse_tags(tags) # pylint: disable=protected-access + return cast(Dict[str, str], parse_tags(tags)) except HttpResponseError as error: process_storage_error(error) - def _get_page_ranges_options( # type: ignore - self, offset=None, # type: Optional[int] - length=None, # type: Optional[int] - previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]] - **kwargs - ): - # type: (...) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if length is not None and offset is None: - raise ValueError("Offset value must not be None if length is set.") - if length is not None: - length = offset + length - 1 # Reformat to an inclusive range index - page_range, _ = validate_and_format_range_headers( - offset, length, start_range_required=False, end_range_required=False, align_to_page=True - ) - options = { - 'snapshot': self.snapshot, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'timeout': kwargs.pop('timeout', None), - 'range': page_range} - if previous_snapshot_diff: - try: - options['prevsnapshot'] = previous_snapshot_diff.snapshot # type: ignore - except AttributeError: - try: - options['prevsnapshot'] = previous_snapshot_diff['snapshot'] # type: ignore - except TypeError: - options['prevsnapshot'] = previous_snapshot_diff - options.update(kwargs) - return options - @distributed_trace - def get_page_ranges( # type: ignore - self, offset=None, # type: Optional[int] - length=None, # type: Optional[int] - previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]] - **kwargs - ): - # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] - """Returns the list of valid page ranges for a Page Blob or snapshot + def get_page_ranges( + self, offset: Optional[int] = None, + length: Optional[int] = None, + previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: + """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot of a page blob. :param int offset: @@ -2712,13 +2399,23 @@ def get_page_ranges( # type: ignore .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. 
For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. The first element are filled page ranges, the 2nd element is cleared page ranges. :rtype: tuple(list(dict(str, str), list(dict(str, str)) """ - options = self._get_page_ranges_options( + warnings.warn( + "get_page_ranges is deprecated, use list_page_ranges instead", + DeprecationWarning + ) + + options = _get_page_ranges_options( + snapshot=self.snapshot, offset=offset, length=length, previous_snapshot_diff=previous_snapshot_diff, @@ -2732,14 +2429,104 @@ def get_page_ranges( # type: ignore process_storage_error(error) return get_page_ranges_result(ranges) + @distributed_trace + def list_page_ranges( + self, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> ItemPaged[PageRange]: + """Returns the list of valid page ranges for a Page Blob or snapshot + of a page blob. If `previous_snapshot` is specified, the result will be + a diff of changes between the target blob and the previous snapshot. + + :keyword int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword previous_snapshot: + A snapshot value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot` + is the older of the two. + :paramtype previous_snapshot: str or Dict[str, Any] + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. 
+ :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int results_per_page: + The maximum number of page ranges to retrieve per API call. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: An iterable (auto-paging) of PageRange. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.PageRange] + """ + results_per_page = kwargs.pop('results_per_page', None) + options = _get_page_ranges_options( + snapshot=self.snapshot, + offset=offset, + length=length, + previous_snapshot_diff=previous_snapshot, + **kwargs) + + if previous_snapshot: + command = partial( + self._client.page_blob.get_page_ranges_diff, + **options) + else: + command = partial( + self._client.page_blob.get_page_ranges, + **options) + return ItemPaged( + command, results_per_page=results_per_page, + page_iterator_class=PageRangePaged) + @distributed_trace def get_page_range_diff_for_managed_disk( - self, previous_snapshot_url, # type: str - offset=None, # type: Optional[int] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] + self, previous_snapshot_url: str, + offset: Optional[int] = None, + length:Optional[int] = None, + **kwargs: Any + ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: """Returns the list of valid page ranges for a managed disk or snapshot. .. note:: @@ -2748,7 +2535,7 @@ def get_page_range_diff_for_managed_disk( .. versionadded:: 12.2.0 This operation was introduced in API version '2019-07-07'. - :param previous_snapshot_url: + :param str previous_snapshot_url: Specifies the URL of a previous snapshot of the managed disk. The response will only contain pages that were changed between the target blob and its previous snapshot. @@ -2788,13 +2575,18 @@ def get_page_range_diff_for_managed_disk( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. The first element are filled page ranges, the 2nd element is cleared page ranges. 
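# [Illustrative sketch, editor's addition.] Iterating page ranges with the
# non-deprecated list_page_ranges API described above, assuming a recent public
# azure-storage-blob release; names are placeholders.
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "disk.vhd")
for page_range in blob.list_page_ranges():  # auto-paging iterable of PageRange
    kind = "cleared" if page_range.cleared else "filled"
    print(kind, page_range.start, page_range.end)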
:rtype: tuple(list(dict(str, str), list(dict(str, str)) """ - options = self._get_page_ranges_options( + options = _get_page_ranges_options( + snapshot=self.snapshot, offset=offset, length=length, prev_snapshot_url=previous_snapshot_url, @@ -2805,25 +2597,12 @@ def get_page_range_diff_for_managed_disk( process_storage_error(error) return get_page_ranges_result(ranges) - def _set_sequence_number_options(self, sequence_number_action, sequence_number=None, **kwargs): - # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if sequence_number_action is None: - raise ValueError("A sequence number action must be specified") - options = { - 'sequence_number_action': sequence_number_action, - 'timeout': kwargs.pop('timeout', None), - 'blob_sequence_number': sequence_number, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def set_sequence_number(self, sequence_number_action, sequence_number=None, **kwargs): - # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Union[str, datetime]] + def set_sequence_number( + self, sequence_number_action: Union[str, "SequenceNumberAction"], + sequence_number: Optional[str] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets the blob sequence number. :param str sequence_number_action: @@ -2861,44 +2640,22 @@ def set_sequence_number(self, sequence_number_action, sequence_number=None, **kw .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
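# [Illustrative sketch, editor's addition.] Managing a page blob's sequence
# number, which backs the if_sequence_number_* write conditions. Assumes the
# public azure-storage-blob surface; names are placeholders.
from azure.storage.blob import BlobClient, SequenceNumberAction

blob = BlobClient.from_connection_string("<connection-string>", "container", "disk.vhd")
blob.set_sequence_number(SequenceNumberAction.UPDATE, sequence_number="7")
blob.set_sequence_number(SequenceNumberAction.INCREMENT)  # no number needed to bump by one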
:rtype: dict(str, Any) """ - options = self._set_sequence_number_options( - sequence_number_action, sequence_number=sequence_number, **kwargs) + options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs) try: - return self._client.page_blob.update_sequence_number(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.update_sequence_number(**options)) except HttpResponseError as error: process_storage_error(error) - def _resize_blob_options(self, size, **kwargs): - # type: (int, **Any) -> Dict[str, Any] - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - if size is None: - raise ValueError("A content length must be specified for a Page Blob.") - - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'blob_content_length': size, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def resize_blob(self, size, **kwargs): - # type: (int, **Any) -> Dict[str, Union[str, datetime]] + def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]: """Resizes a page blob to the specified size. If the specified value is less than the current size of the blob, @@ -2939,74 +2696,29 @@ def resize_blob(self, size, **kwargs): blob and number of allowed IOPS. This is only applicable to page blobs on premium storage accounts. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). :rtype: dict(str, Any) """ - options = self._resize_blob_options(size, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _resize_blob_options(size=size, **kwargs) try: - return self._client.page_blob.resize(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.resize(**options)) except HttpResponseError as error: process_storage_error(error) - def _upload_page_options( # type: ignore - self, page, # type: bytes - offset, # type: int - length, # type: int - **kwargs - ): - # type: (...) 
-> Dict[str, Any] - if isinstance(page, six.text_type): - page = page.encode(kwargs.pop('encoding', 'UTF-8')) - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - - if offset is None or offset % 512 != 0: - raise ValueError("offset must be an integer that aligns with 512 page size") - if length is None or length % 512 != 0: - raise ValueError("length must be an integer that aligns with 512 page size") - end_range = offset + length - 1 # Reformat to an inclusive range index - content_range = 'bytes={0}-{1}'.format(offset, end_range) # type: ignore - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - seq_conditions = SequenceNumberAccessConditions( - if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), - if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), - if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) - ) - mod_conditions = get_modify_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - validate_content = kwargs.pop('validate_content', False) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - options = { - 'body': page[:length], - 'content_length': length, - 'transactional_content_md5': None, - 'timeout': kwargs.pop('timeout', None), - 'range': content_range, - 'lease_access_conditions': access_conditions, - 'sequence_number_access_conditions': seq_conditions, - 'modified_access_conditions': mod_conditions, - 'validate_content': validate_content, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def upload_page( # type: ignore - self, page, # type: bytes - offset, # type: int - length, # type: int - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def upload_page( + self, page: bytes, + offset: int, + length: int, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """The Upload Pages operation writes a range of pages to a page blob. :param bytes page: @@ -3080,87 +2792,36 @@ def upload_page( # type: ignore :keyword str encoding: Defaults to UTF-8. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified). 
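# [Illustrative sketch, editor's addition.] Writing one page range; offset and
# length must both align to 512 bytes, as required above. Assumes the public
# azure-storage-blob surface; names are placeholders.
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "disk.vhd")
blob.create_page_blob(size=1024 * 1024)                # page blob size is 512-byte aligned too
blob.upload_page(b"\x01" * 512, offset=0, length=512)  # writes bytes 0-511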
:rtype: dict(str, Any) """ - options = self._upload_page_options( + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_page_options( page=page, offset=offset, length=length, **kwargs) try: - return self._client.page_blob.upload_pages(**options) # type: ignore + return cast(Dict[str, Any], self._client.page_blob.upload_pages(**options)) except HttpResponseError as error: process_storage_error(error) - def _upload_pages_from_url_options( # type: ignore - self, source_url, # type: str - offset, # type: int - length, # type: int - source_offset, # type: int - **kwargs - ): - # type: (...) -> Dict[str, Any] - if self.require_encryption or (self.key_encryption_key is not None): - raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - - # TODO: extract the code to a method format_range - if offset is None or offset % 512 != 0: - raise ValueError("offset must be an integer that aligns with 512 page size") - if length is None or length % 512 != 0: - raise ValueError("length must be an integer that aligns with 512 page size") - if source_offset is None or offset % 512 != 0: - raise ValueError("source_offset must be an integer that aligns with 512 page size") - - # Format range - end_range = offset + length - 1 - destination_range = 'bytes={0}-{1}'.format(offset, end_range) - source_range = 'bytes={0}-{1}'.format(source_offset, source_offset + length - 1) # should subtract 1 here? - - seq_conditions = SequenceNumberAccessConditions( - if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), - if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), - if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) - ) - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - mod_conditions = get_modify_conditions(kwargs) - source_mod_conditions = get_source_conditions(kwargs) - cpk_scope_info = get_cpk_scope_info(kwargs) - source_content_md5 = kwargs.pop('source_content_md5', None) - cpk = kwargs.pop('cpk', None) - cpk_info = None - if cpk: - if self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) - - options = { - 'source_url': source_url, - 'content_length': 0, - 'source_range': source_range, - 'range': destination_range, - 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, - 'timeout': kwargs.pop('timeout', None), - 'lease_access_conditions': access_conditions, - 'sequence_number_access_conditions': seq_conditions, - 'modified_access_conditions': mod_conditions, - 'source_modified_access_conditions': source_mod_conditions, - 'cpk_scope_info': cpk_scope_info, - 'cpk_info': cpk_info, - 'cls': return_response_headers} - options.update(kwargs) - return options - @distributed_trace - def upload_pages_from_url(self, source_url, # type: str - offset, # type: int - length, # type: int - source_offset, # type: int - **kwargs - ): - # type: (...) 
-
-    def _clear_page_options(self, offset, length, **kwargs):
-        # type: (int, int, **Any) -> Dict[str, Any]
-        if self.require_encryption or (self.key_encryption_key is not None):
-            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
-        access_conditions = get_access_conditions(kwargs.pop('lease', None))
-        seq_conditions = SequenceNumberAccessConditions(
-            if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None),
-            if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None),
-            if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None)
-        )
-        mod_conditions = get_modify_conditions(kwargs)
-        if offset is None or offset % 512 != 0:
-            raise ValueError("offset must be an integer that aligns with 512 page size")
-        if length is None or length % 512 != 0:
-            raise ValueError("length must be an integer that aligns with 512 page size")
-        end_range = length + offset - 1  # Reformat to an inclusive range index
-        content_range = 'bytes={0}-{1}'.format(offset, end_range)
-
-        cpk = kwargs.pop('cpk', None)
-        cpk_info = None
-        if cpk:
-            if self.scheme.lower() != 'https':
-                raise ValueError("Customer provided encryption key must be used over HTTPS.")
-            cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                               encryption_algorithm=cpk.algorithm)
-
-        options = {
-            'content_length': 0,
-            'timeout': kwargs.pop('timeout', None),
-            'range': content_range,
-            'lease_access_conditions': access_conditions,
-            'sequence_number_access_conditions': seq_conditions,
-            'modified_access_conditions': mod_conditions,
-            'cpk_info': cpk_info,
-            'cls': return_response_headers}
-        options.update(kwargs)
-        return options
-
     @distributed_trace
-    def clear_page(self, offset, length, **kwargs):
-        # type: (int, int, **Any) -> Dict[str, Union[str, datetime]]
+    def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]:
         """Clears a range of pages.
 
         :param int offset:
@@ -3359,77 +2994,34 @@ def clear_page(self, offset, length, **kwargs):
             As the encryption key itself is provided in the request,
             a secure connection must be established to transfer the key.
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
         :returns: Blob-updated property dict (Etag and last modified).
         :rtype: dict(str, Any)
         """
-        options = self._clear_page_options(offset, length, **kwargs)
+        if self.require_encryption or (self.key_encryption_key is not None):
+            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+        if kwargs.get('cpk') and self.scheme.lower() != 'https':
+            raise ValueError("Customer provided encryption key must be used over HTTPS.")
+        options = _clear_page_options(
+            offset=offset,
+            length=length,
+            **kwargs
+        )
        try:
-            return self._client.page_blob.clear_pages(**options)  # type: ignore
+            return cast(Dict[str, Any], self._client.page_blob.clear_pages(**options))
        except HttpResponseError as error:
            process_storage_error(error)
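clear_page zeroes an aligned range without changing the blob's size; continuing the same sketch, with the same 512-byte alignment rules::

    blob.clear_page(offset=0, length=512)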
-
-    def _append_block_options(  # type: ignore
-            self, data,  # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]]
-            length=None,  # type: Optional[int]
-            **kwargs
-        ):
-        # type: (...) -> Dict[str, Any]
-        if self.require_encryption or (self.key_encryption_key is not None):
-            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
-
-        if isinstance(data, six.text_type):
-            data = data.encode(kwargs.pop('encoding', 'UTF-8'))  # type: ignore
-        if length is None:
-            length = get_length(data)
-            if length is None:
-                length, data = read_length(data)
-        if length == 0:
-            return {}
-        if isinstance(data, bytes):
-            data = data[:length]
-
-        appendpos_condition = kwargs.pop('appendpos_condition', None)
-        maxsize_condition = kwargs.pop('maxsize_condition', None)
-        validate_content = kwargs.pop('validate_content', False)
-        append_conditions = None
-        if maxsize_condition or appendpos_condition is not None:
-            append_conditions = AppendPositionAccessConditions(
-                max_size=maxsize_condition,
-                append_position=appendpos_condition
-            )
-        access_conditions = get_access_conditions(kwargs.pop('lease', None))
-        mod_conditions = get_modify_conditions(kwargs)
-        cpk_scope_info = get_cpk_scope_info(kwargs)
-        cpk = kwargs.pop('cpk', None)
-        cpk_info = None
-        if cpk:
-            if self.scheme.lower() != 'https':
-                raise ValueError("Customer provided encryption key must be used over HTTPS.")
-            cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                               encryption_algorithm=cpk.algorithm)
-        options = {
-            'body': data,
-            'content_length': length,
-            'timeout': kwargs.pop('timeout', None),
-            'transactional_content_md5': None,
-            'lease_access_conditions': access_conditions,
-            'append_position_access_conditions': append_conditions,
-            'modified_access_conditions': mod_conditions,
-            'validate_content': validate_content,
-            'cpk_scope_info': cpk_scope_info,
-            'cpk_info': cpk_info,
-            'cls': return_response_headers}
-        options.update(kwargs)
-        return options
-
     @distributed_trace
-    def append_block(  # type: ignore
-            self, data,  # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]]
-            length=None,  # type: Optional[int]
-            **kwargs
-        ):
-        # type: (...) -> Dict[str, Union[str, datetime, int]]
+    def append_block(
+        self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
+        length: Optional[int] = None,
+        **kwargs: Any
+    ) -> Dict[str, Union[str, datetime, int]]:
         """Commits a new block of data to the end of the existing append blob.
 
         :param data:
@@ -3499,85 +3091,35 @@ def append_block(  # type: ignore
             .. versionadded:: 12.2.0
 
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
        :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count).
        :rtype: dict(str, Any)
        """
-        options = self._append_block_options(
-            data,
+        if self.require_encryption or (self.key_encryption_key is not None):
+            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+        if kwargs.get('cpk') and self.scheme.lower() != 'https':
+            raise ValueError("Customer provided encryption key must be used over HTTPS.")
+        options = _append_block_options(
+            data=data,
            length=length,
            **kwargs
        )
        try:
-            return self._client.append_blob.append_block(**options)  # type: ignore
+            return cast(Dict[str, Any], self._client.append_blob.append_block(**options))
        except HttpResponseError as error:
            process_storage_error(error)
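A hedged sketch for append_block, again assuming the public package and placeholder names; appendpos_condition and maxsize_condition are the optimistic-concurrency controls described in the docstring above::

    append_blob = BlobClient.from_connection_string(
        "<connection-string>", container_name="my-container", blob_name="my-append-blob")
    append_blob.create_append_blob()
    # Rejected with an AppendPositionConditionNotMet-style error if another
    # writer has already advanced the blob past the expected append offset.
    append_blob.append_block(b"log line 1\n", appendpos_condition=0)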
-
-    def _append_block_from_url_options(  # type: ignore
-            self, copy_source_url,  # type: str
-            source_offset=None,  # type: Optional[int]
-            source_length=None,  # type: Optional[int]
-            **kwargs
-        ):
-        # type: (...) -> Dict[str, Any]
-        if self.require_encryption or (self.key_encryption_key is not None):
-            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
-
-        # If end range is provided, start range must be provided
-        if source_length is not None and source_offset is None:
-            raise ValueError("source_offset should also be specified if source_length is specified")
-        # Format based on whether length is present
-        source_range = None
-        if source_length is not None:
-            end_range = source_offset + source_length - 1
-            source_range = 'bytes={0}-{1}'.format(source_offset, end_range)
-        elif source_offset is not None:
-            source_range = "bytes={0}-".format(source_offset)
-
-        appendpos_condition = kwargs.pop('appendpos_condition', None)
-        maxsize_condition = kwargs.pop('maxsize_condition', None)
-        source_content_md5 = kwargs.pop('source_content_md5', None)
-        append_conditions = None
-        if maxsize_condition or appendpos_condition is not None:
-            append_conditions = AppendPositionAccessConditions(
-                max_size=maxsize_condition,
-                append_position=appendpos_condition
-            )
-        access_conditions = get_access_conditions(kwargs.pop('lease', None))
-        mod_conditions = get_modify_conditions(kwargs)
-        source_mod_conditions = get_source_conditions(kwargs)
-        cpk_scope_info = get_cpk_scope_info(kwargs)
-        cpk = kwargs.pop('cpk', None)
-        cpk_info = None
-        if cpk:
-            if self.scheme.lower() != 'https':
-                raise ValueError("Customer provided encryption key must be used over HTTPS.")
-            cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
-                               encryption_algorithm=cpk.algorithm)
-
-        options = {
-            'source_url': copy_source_url,
-            'content_length': 0,
-            'source_range': source_range,
-            'source_content_md5': source_content_md5,
-            'transactional_content_md5': None,
-            'lease_access_conditions': access_conditions,
-            'append_position_access_conditions': append_conditions,
-            'modified_access_conditions': mod_conditions,
-            'source_modified_access_conditions': source_mod_conditions,
-            'cpk_scope_info': cpk_scope_info,
-            'cpk_info': cpk_info,
-            'cls': return_response_headers,
-            'timeout': kwargs.pop('timeout', None)}
-        options.update(kwargs)
-        return options
-
     @distributed_trace
-    def append_block_from_url(self, copy_source_url,  # type: str
-                              source_offset=None,  # type: Optional[int]
-                              source_length=None,  # type: Optional[int]
-                              **kwargs):
-        # type: (...) -> Dict[str, Union[str, datetime, int]]
+    def append_block_from_url(
+        self, copy_source_url: str,
+        source_offset: Optional[int] = None,
+        source_length: Optional[int] = None,
+        **kwargs: Any
+    ) -> Dict[str, Union[str, datetime, int]]:
         """
         Creates a new block to be committed as part of a blob, where the contents are read from a source url.
 
@@ -3661,45 +3203,35 @@ def append_block_from_url(self, copy_source_url,  # type: str
             .. versionadded:: 12.2.0
 
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
+        :keyword str source_authorization:
+            Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
+            the prefix of the source_authorization string.
+        :returns: Result after appending a new block.
+        :rtype: Dict[str, Union[str, datetime, int]]
        """
-        options = self._append_block_from_url_options(
-            copy_source_url=self._encode_source_url(copy_source_url),
+        if self.require_encryption or (self.key_encryption_key is not None):
+            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+        if kwargs.get('cpk') and self.scheme.lower() != 'https':
+            raise ValueError("Customer provided encryption key must be used over HTTPS.")
+        options = _append_block_from_url_options(
+            copy_source_url=copy_source_url,
            source_offset=source_offset,
            source_length=source_length,
            **kwargs
        )
        try:
-            return self._client.append_blob.append_block_from_url(**options)  # type: ignore
+            return cast(Dict[str, Union[str, datetime, int]],
+                        self._client.append_blob.append_block_from_url(**options))
        except HttpResponseError as error:
            process_storage_error(error)
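append_block_from_url reads the appended bytes from a source URL rather than the request body. Continuing the same placeholder sketch (source_authorization, with its required "bearer " prefix, is only needed for OAuth-protected sources)::

    append_blob.append_block_from_url(
        "https://<account>.blob.core.windows.net/src/source-blob?<sas-token>",
        source_offset=0, source_length=512)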
-
-    def _seal_append_blob_options(self, **kwargs):
-        # type: (...) -> Dict[str, Any]
-        if self.require_encryption or (self.key_encryption_key is not None):
-            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
-
-        appendpos_condition = kwargs.pop('appendpos_condition', None)
-        append_conditions = None
-        if appendpos_condition is not None:
-            append_conditions = AppendPositionAccessConditions(
-                append_position=appendpos_condition
-            )
-        access_conditions = get_access_conditions(kwargs.pop('lease', None))
-        mod_conditions = get_modify_conditions(kwargs)
-
-        options = {
-            'timeout': kwargs.pop('timeout', None),
-            'lease_access_conditions': access_conditions,
-            'append_position_access_conditions': append_conditions,
-            'modified_access_conditions': mod_conditions,
-            'cls': return_response_headers}
-        options.update(kwargs)
-        return options
-
     @distributed_trace
-    def seal_append_blob(self, **kwargs):
-        # type: (...) -> Dict[str, Union[str, datetime, int]]
+    def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]:
         """The Seal operation seals the Append Blob to make it read-only.
 
            .. versionadded:: 12.4.0
 
@@ -3732,12 +3264,51 @@ def seal_append_blob(self, **kwargs):
         :keyword ~azure.core.MatchConditions match_condition:
             The match condition to use upon the etag.
         :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
        :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count).
        :rtype: dict(str, Any)
        """
-        options = self._seal_append_blob_options(**kwargs)
+        if self.require_encryption or (self.key_encryption_key is not None):
+            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+        options = _seal_append_blob_options(**kwargs)
        try:
-            return self._client.append_blob.seal(**options)  # type: ignore
+            return cast(Dict[str, Any], self._client.append_blob.seal(**options))
        except HttpResponseError as error:
            process_storage_error(error)
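Sealing is terminal: once seal_append_blob succeeds, further append_block calls on the sketch's append_blob are rejected and the blob behaves as read-only::

    append_blob.seal_append_blob()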
+
+    @distributed_trace
+    def _get_container_client(self) -> "ContainerClient":
+        """Get a client to interact with the blob's parent container.
+
+        The container need not already exist. Defaults to current blob's credentials.
+
+        :returns: A ContainerClient.
+        :rtype: ~azure.storage.blob.ContainerClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_containers.py
+                :start-after: [START get_container_client_from_blob_client]
+                :end-before: [END get_container_client_from_blob_client]
+                :language: python
+                :dedent: 8
+                :caption: Get container client from blob object.
+        """
+        from ._container_client import ContainerClient
+        if not isinstance(self._pipeline._transport, TransportWrapper):  # pylint: disable = protected-access
+            _pipeline = Pipeline(
+                transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
+                policies=self._pipeline._impl_policies  # pylint: disable = protected-access
+            )
+        else:
+            _pipeline = self._pipeline
+        return ContainerClient(
+            f"{self.scheme}://{self.primary_hostname}", container_name=self.container_name,
+            credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
+            _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
+            require_encryption=self.require_encryption, encryption_version=self.encryption_version,
+            key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function)
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client_helpers.py
new file mode 100644
index 000000000000..a04f0ea02525
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_client_helpers.py
@@ -0,0 +1,1246 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from io import BytesIO +from typing import ( + Any, AnyStr, AsyncGenerator, AsyncIterable, cast, + Dict, IO, Iterable, List, Optional, Tuple, Union, + TYPE_CHECKING +) +from urllib.parse import quote, unquote, urlparse + +from ._deserialize import deserialize_blob_stream +from ._encryption import modify_user_agent_for_encryption, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION +from ._generated.models import ( + AppendPositionAccessConditions, + BlobHTTPHeaders, + BlockList, + BlockLookupList, + CpkInfo, + DeleteSnapshotsOptionType, + QueryRequest, + SequenceNumberAccessConditions +) +from ._models import ( + BlobBlock, + BlobProperties, + BlobType, + DelimitedJsonDialect, + DelimitedTextDialect, + PremiumPageBlobTier, + QuickQueryDialect +) +from ._serialize import ( + get_access_conditions, + get_cpk_scope_info, + get_modify_conditions, + get_source_conditions, + serialize_blob_tags_header, + serialize_blob_tags, + serialize_query_format +) +from ._shared import encode_base64 +from ._shared.base_client import parse_query +from ._shared.request_handlers import ( + add_metadata_headers, + get_length, + read_length, + validate_and_format_range_headers +) +from ._shared.response_handlers import return_headers_and_deserialized, return_response_headers +from ._shared.uploads import IterStreamer +from ._shared.uploads_async import AsyncIterStreamer +from ._upload_helpers import _any_conditions + +if TYPE_CHECKING: + from urllib.parse import ParseResult + from ._generated import AzureBlobStorage + from ._models import ContentSettings + from ._shared.models import StorageConfiguration + + +def _parse_url( + account_url: str, + container_name: str, + blob_name: str +) -> Tuple["ParseResult", Optional[str], Optional[str]]: + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError as exc: + raise ValueError("Account URL must be a string.") from exc + parsed_url = urlparse(account_url.rstrip('/')) + + if not (container_name and blob_name): + raise ValueError("Please specify a container name and blob name.") + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {account_url}") + + path_snapshot, sas_token = parse_query(parsed_url.query) + + return parsed_url, sas_token, path_snapshot + +def _format_url(container_name: Union[bytes, str], scheme: str, blob_name: str, query_str: str, hostname: str) -> str: + if isinstance(container_name, str): + container_name = container_name.encode('UTF-8') + return f"{scheme}://{hostname}/{quote(container_name)}/{quote(blob_name, safe='~/')}{query_str}" + +def _encode_source_url(source_url: str) -> str: + parsed_source_url = urlparse(source_url) + source_scheme = parsed_source_url.scheme + source_hostname = parsed_source_url.netloc.rstrip('/') + source_path = unquote(parsed_source_url.path) + source_query = parsed_source_url.query + result = [f"{source_scheme}://{source_hostname}{quote(source_path, safe='~/')}"] + if source_query: + result.append(source_query) + return '?'.join(result) + +def _upload_blob_options( # pylint:disable=too-many-statements + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], + blob_type: Union[str, BlobType], + length: Optional[int], + metadata: Optional[Dict[str, str]], + encryption_options: Dict[str, Any], + config: "StorageConfiguration", + sdk_moniker: str, + client: "AzureBlobStorage", + **kwargs: Any +) -> Dict[str, Any]: + encoding 
= kwargs.pop('encoding', 'UTF-8') + if isinstance(data, str): + data = data.encode(encoding) + if length is None: + length = get_length(data) + if isinstance(data, bytes): + data = data[:length] + + stream: Optional[Any] = None + if isinstance(data, bytes): + stream = BytesIO(data) + elif hasattr(data, 'read'): + stream = data + elif hasattr(data, '__iter__') and not isinstance(data, (list, tuple, set, dict)): + stream = IterStreamer(data, encoding=encoding) + elif hasattr(data, '__aiter__'): + stream = AsyncIterStreamer(cast(AsyncGenerator, data), encoding=encoding) + else: + raise TypeError(f"Unsupported data type: {type(data)}") + + validate_content = kwargs.pop('validate_content', False) + content_settings = kwargs.pop('content_settings', None) + overwrite = kwargs.pop('overwrite', False) + max_concurrency = kwargs.pop('max_concurrency', 1) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + kwargs['cpk_info'] = cpk_info + + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) + kwargs['modified_access_conditions'] = get_modify_conditions(kwargs) + kwargs['cpk_scope_info'] = get_cpk_scope_info(kwargs) + if content_settings: + kwargs['blob_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + kwargs['blob_tags_string'] = serialize_blob_tags_header(kwargs.pop('tags', None)) + kwargs['stream'] = stream + kwargs['length'] = length + kwargs['overwrite'] = overwrite + kwargs['headers'] = headers + kwargs['validate_content'] = validate_content + kwargs['blob_settings'] = config + kwargs['max_concurrency'] = max_concurrency + kwargs['encryption_options'] = encryption_options + # Add feature flag to user agent for encryption + if encryption_options['key']: + modify_user_agent_for_encryption( + config.user_agent_policy.user_agent, + sdk_moniker, + encryption_options['version'], + kwargs) + + if blob_type == BlobType.BlockBlob: + kwargs['client'] = client.block_blob + elif blob_type == BlobType.PageBlob: + if (encryption_options['version'] == '2.0' and + (encryption_options['required'] or encryption_options['key'] is not None)): + raise ValueError("Encryption version 2.0 does not currently support page blobs.") + kwargs['client'] = client.page_blob + elif blob_type == BlobType.AppendBlob: + if encryption_options['required'] or (encryption_options['key'] is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + kwargs['client'] = client.append_blob + else: + raise ValueError(f"Unsupported BlobType: {blob_type}") + return kwargs + +def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, Any]: + metadata = kwargs.pop('metadata', None) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + source_url = _encode_source_url(source_url=source_url) + tier = kwargs.pop('standard_blob_tier', None) + overwrite = kwargs.pop('overwrite', False) + content_settings = kwargs.pop('content_settings', None) + source_authorization = kwargs.pop('source_authorization', None) + if 
content_settings: + kwargs['blob_http_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=None, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'content_length': 0, + 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), + 'source_content_md5': kwargs.pop('source_content_md5', None), + 'copy_source': source_url, + 'modified_access_conditions': get_modify_conditions(kwargs), + 'blob_tags_string': serialize_blob_tags_header(kwargs.pop('tags', None)), + 'cls': return_response_headers, + 'lease_access_conditions': get_access_conditions(kwargs.pop('destination_lease', None)), + 'tier': tier.value if tier else None, + 'source_modified_access_conditions': get_source_conditions(kwargs), + 'cpk_info': cpk_info, + 'cpk_scope_info': get_cpk_scope_info(kwargs), + 'headers': headers, + } + options.update(kwargs) + if not overwrite and not _any_conditions(**options): + options['modified_access_conditions'].if_none_match = '*' + return options + +def _download_blob_options( + blob_name: str, + container_name: str, + version_id: Optional[str], + offset: Optional[int], + length: Optional[int], + encoding: Optional[str], + encryption_options: Dict[str, Any], + config: "StorageConfiguration", + sdk_moniker: str, + client: "AzureBlobStorage", + **kwargs +) -> Dict[str, Any]: + """Creates a dictionary containing the options for a download blob operation. + + :param str blob_name: + The name of the blob. + :param str container_name: + The name of the container. + :param Optional[str] version_id: + The version id parameter is a value that, when present, specifies the version of the blob to download. + :param Optional[int] offset: + Start of byte range to use for downloading a section of the blob. Must be set if length is provided. + :param Optional[int] length: + Number of bytes to read from the stream. This is optional, but should be supplied for optimal performance. + :param Optional[str] encoding: + Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :param Dict[str, Any] encryption_options: + The options for encryption, if enabled. + :param StorageConfiguration config: + The Storage configuration options. + :param str sdk_moniker: + The string representing the SDK package version. + :param AzureBlobStorage client: + The generated Blob Storage client. + :returns: A dictionary containing the download blob options. 
+ :rtype: Dict[str, Any] + """ + if length is not None: + if offset is None: + raise ValueError("Offset must be provided if length is provided.") + length = offset + length - 1 # Service actually uses an end-range inclusive index + + validate_content = kwargs.pop('validate_content', False) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + # Add feature flag to user agent for encryption + if encryption_options['key'] or encryption_options['resolver']: + modify_user_agent_for_encryption( + config.user_agent_policy.user_agent, + sdk_moniker, + encryption_options['version'], + kwargs) + + options = { + 'clients': client, + 'config': config, + 'start_range': offset, + 'end_range': length, + 'version_id': version_id, + 'validate_content': validate_content, + 'encryption_options': { + 'required': encryption_options['required'], + 'key': encryption_options['key'], + 'resolver': encryption_options['resolver']}, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'download_cls': kwargs.pop('cls', None) or deserialize_blob_stream, + 'max_concurrency':kwargs.pop('max_concurrency', 1), + 'encoding': encoding, + 'timeout': kwargs.pop('timeout', None), + 'name': blob_name, + 'container': container_name} + options.update(kwargs) + return options + +def _quick_query_options(snapshot: Optional[str], query_expression: str, **kwargs: Any ) -> Tuple[Dict[str, Any], str]: + delimiter = '\n' + input_format = kwargs.pop('blob_format', None) + if input_format == QuickQueryDialect.DelimitedJson: + input_format = DelimitedJsonDialect() + if input_format == QuickQueryDialect.DelimitedText: + input_format = DelimitedTextDialect() + input_parquet_format = input_format == "ParquetDialect" + if input_format and not input_parquet_format: + try: + delimiter = input_format.lineterminator + except AttributeError: + try: + delimiter = input_format.delimiter + except AttributeError as exc: + raise ValueError("The Type of blob_format can only be DelimitedTextDialect or " + "DelimitedJsonDialect or ParquetDialect") from exc + output_format = kwargs.pop('output_format', None) + if output_format == QuickQueryDialect.DelimitedJson: + output_format = DelimitedJsonDialect() + if output_format == QuickQueryDialect.DelimitedText: + output_format = DelimitedTextDialect() + if output_format: + if output_format == "ParquetDialect": + raise ValueError("ParquetDialect is invalid as an output format.") + try: + delimiter = output_format.lineterminator + except AttributeError: + try: + delimiter = output_format.delimiter + except AttributeError: + pass + else: + output_format = input_format if not input_parquet_format else None + query_request = QueryRequest( + expression=query_expression, + input_serialization=serialize_query_format(input_format), + output_serialization=serialize_query_format(output_format) + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) + options = { + 'query_request': query_request, + 'lease_access_conditions': access_conditions, + 
'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'snapshot': snapshot, + 'timeout': kwargs.pop('timeout', None), + 'cls': return_headers_and_deserialized, + } + options.update(kwargs) + return options, delimiter + +def _generic_delete_blob_options(delete_snapshots: Optional[str] = None, **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if delete_snapshots: + delete_snapshots = DeleteSnapshotsOptionType(delete_snapshots) + options = { + 'timeout': kwargs.pop('timeout', None), + 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs + 'delete_snapshots': delete_snapshots or None, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions} + options.update(kwargs) + return options + +def _delete_blob_options( + snapshot: Optional[str], + version_id: Optional[str], + delete_snapshots: Optional[str] = None, + **kwargs: Any +) -> Dict[str, Any]: + if snapshot and delete_snapshots: + raise ValueError("The delete_snapshots option cannot be used with a specific snapshot.") + options = _generic_delete_blob_options(delete_snapshots, **kwargs) + options['snapshot'] = snapshot + options['version_id'] = version_id + options['blob_delete_type'] = kwargs.pop('blob_delete_type', None) + return options + +def _set_http_headers_options(content_settings: Optional["ContentSettings"] = None, **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + options = { + 'timeout': kwargs.pop('timeout', None), + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _set_blob_metadata_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any): + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + +def _create_page_blob_options( + size: int, + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + **kwargs: Any +) -> Dict[str, Any]: + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = 
get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + sequence_number = kwargs.pop('sequence_number', None) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + tier = None + if premium_page_blob_tier: + try: + tier = premium_page_blob_tier.value # type: ignore + except AttributeError: + tier = premium_page_blob_tier # type: ignore + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_content_length': size, + 'blob_sequence_number': sequence_number, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + "tier": tier, + 'headers': headers} + options.update(kwargs) + return options + +def _create_append_blob_options( + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any +) -> Dict[str, Any]: + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + 
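Each of these module-level option builders follows the same contract: pop the keyword arguments it understands, fold them into the parameter dict the generated operation expects, and pass any remainder through via options.update(kwargs). A sketch of the intended call site, mirroring the client methods earlier in this patch (names assumed from the surrounding code)::

    options = _create_append_blob_options(
        content_settings=content_settings,
        metadata=metadata,
        **kwargs)
    try:
        # The generated AppendBlobOperations client accepts the dict verbatim.
        return cast(Dict[str, Any], self._client.append_blob.create(**options))
    except HttpResponseError as error:
        process_storage_error(error)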
+def _create_snapshot_options(metadata: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + +def _start_copy_from_url_options( # pylint:disable=too-many-statements + source_url: str, + metadata: Optional[Dict[str, str]] = None, + incremental_copy: bool = False, + **kwargs: Any +) -> Dict[str, Any]: + source_url = _encode_source_url(source_url=source_url) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + if 'source_lease' in kwargs: + source_lease = kwargs.pop('source_lease') + try: + headers['x-ms-source-lease-id'] = source_lease.id + except AttributeError: + headers['x-ms-source-lease-id'] = source_lease + + tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None) + tags = kwargs.pop('tags', None) + + # Options only available for sync copy + requires_sync = kwargs.pop('requires_sync', None) + encryption_scope_str = kwargs.pop('encryption_scope', None) + source_authorization = kwargs.pop('source_authorization', None) + # If tags is a str, interpret that as copy_source_tags + copy_source_tags = isinstance(tags, str) + + if incremental_copy: + if source_authorization: + raise ValueError("Source authorization tokens are not applicable for incremental copying.") + if copy_source_tags: + raise ValueError("Copying source tags is not applicable for incremental copying.") + + # TODO: refactor start_copy_from_url api in _blob_client.py. Call _generated/_blob_operations.py copy_from_url + # when requires_sync=True is set. + # Currently both sync copy and async copy are calling _generated/_blob_operations.py start_copy_from_url. + # As sync copy diverges more from async copy, more problem will surface. 
+ if requires_sync is True: + headers['x-ms-requires-sync'] = str(requires_sync) + if encryption_scope_str: + headers['x-ms-encryption-scope'] = encryption_scope_str + if source_authorization: + headers['x-ms-copy-source-authorization'] = source_authorization + if copy_source_tags: + headers['x-ms-copy-source-tag-option'] = tags + else: + if encryption_scope_str: + raise ValueError( + "Encryption_scope is only supported for sync copy, please specify requires_sync=True") + if source_authorization: + raise ValueError( + "Source authorization tokens are only supported for sync copy, please specify requires_sync=True") + if copy_source_tags: + raise ValueError( + "Copying source tags is only supported for sync copy, please specify requires_sync=True") + + timeout = kwargs.pop('timeout', None) + dest_mod_conditions = get_modify_conditions(kwargs) + blob_tags_string = serialize_blob_tags_header(tags) if not copy_source_tags else None + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + options = { + 'copy_source': source_url, + 'seal_blob': kwargs.pop('seal_destination_blob', None), + 'timeout': timeout, + 'modified_access_conditions': dest_mod_conditions, + 'blob_tags_string': blob_tags_string, + 'headers': headers, + 'cls': return_response_headers, + } + if not incremental_copy: + source_mod_conditions = get_source_conditions(kwargs) + dest_access_conditions = get_access_conditions(kwargs.pop('destination_lease', None)) + options['source_modified_access_conditions'] = source_mod_conditions + options['lease_access_conditions'] = dest_access_conditions + options['tier'] = tier.value if tier else None + options.update(kwargs) + return options + +def _abort_copy_options(copy_id: Union[str, Dict[str, Any], BlobProperties], **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + if isinstance(copy_id, BlobProperties): + copy_id = copy_id.copy.id # type: ignore [assignment] + elif isinstance(copy_id, dict): + copy_id = copy_id['copy_id'] + options = { + 'copy_id': copy_id, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None)} + options.update(kwargs) + return options + +def _stage_block_options( + block_id: str, + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: + block_id = encode_base64(str(block_id)) + if isinstance(data, str): + data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + if length is None: + length = get_length(data) + if length is None: + length, data = read_length(data) + if isinstance(data, bytes): + data = data[:length] + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'block_id': block_id, + 'content_length': length, + 'body': data, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + } + 
options.update(kwargs) + return options + +def _stage_block_from_url_options( + block_id: str, + source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + source_content_md5: Optional[Union[bytes, bytearray]] = None, + **kwargs: Any +) -> Dict[str, Any]: + source_url = _encode_source_url(source_url=source_url) + source_authorization = kwargs.pop('source_authorization', None) + if source_length is not None and source_offset is None: + raise ValueError("Source offset value must not be None if length is set.") + if source_length is not None and source_offset is not None: + source_length = source_offset + source_length - 1 + block_id = encode_base64(str(block_id)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + range_header = None + if source_offset is not None: + range_header, _ = validate_and_format_range_headers(source_offset, source_length) + + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'copy_source_authorization': source_authorization, + 'block_id': block_id, + 'content_length': 0, + 'source_url': source_url, + 'source_range': range_header, + 'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + } + options.update(kwargs) + return options + +def _get_block_list_result(blocks: BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]]: + committed = [] + uncommitted = [] + if blocks.committed_blocks: + committed = [BlobBlock._from_generated(b) for b in blocks.committed_blocks] # pylint: disable=protected-access + if blocks.uncommitted_blocks: + uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access + return committed, uncommitted + +def _commit_block_list_options( + block_list: List[BlobBlock], + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any +) -> Dict[str, Any]: + block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) + for block in block_list: + if isinstance(block, BlobBlock): + if block.state.value == 'committed': + cast(List[str], block_lookup.committed).append(encode_base64(str(block.id))) + elif block.state.value == 'uncommitted': + cast(List[str], block_lookup.uncommitted).append(encode_base64(str(block.id))) + elif block_lookup.latest is not None: + block_lookup.latest.append(encode_base64(str(block.id))) + else: + block_lookup.latest.append(encode_base64(str(block))) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + blob_headers = None + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = 
get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + tier = kwargs.pop('standard_blob_tier', None) + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'blocks': block_lookup, + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None), + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'tier': tier.value if tier else None, + 'blob_tags_string': blob_tags_string, + 'headers': headers + } + options.update(kwargs) + return options + +def _set_blob_tags_options( + version_id: Optional[str], + tags: Optional[Dict[str, str]] = None, + **kwargs: Any +)-> Dict[str, Any]: + serialized_tags = serialize_blob_tags(tags) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'tags': serialized_tags, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'version_id': version_id, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _get_blob_tags_options(version_id: Optional[str], snapshot: Optional[str], **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'version_id': version_id, + 'snapshot': snapshot, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'timeout': kwargs.pop('timeout', None), + 'cls': return_headers_and_deserialized} + return options + +def _get_page_ranges_options( + snapshot: Optional[str], + offset: Optional[int] = None, + length: Optional[int] = None, + previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any +) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if length is not None and offset is not None: + length = offset + length - 1 # Reformat to an inclusive range index + page_range, _ = validate_and_format_range_headers( + offset, length, start_range_required=False, end_range_required=False, align_to_page=True + ) + options = { + 'snapshot': snapshot, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'timeout': kwargs.pop('timeout', None), + 'range': page_range} + if previous_snapshot_diff: + try: + options['prevsnapshot'] = previous_snapshot_diff.snapshot # type: ignore + except AttributeError: + try: + options['prevsnapshot'] = previous_snapshot_diff['snapshot'] # type: ignore + except TypeError: + options['prevsnapshot'] = previous_snapshot_diff + options.update(kwargs) + return options + +def _set_sequence_number_options( + sequence_number_action: str, + sequence_number: Optional[str] = None, + **kwargs: Any +) -> Dict[str, Any]: + access_conditions = 
get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if sequence_number_action is None: + raise ValueError("A sequence number action must be specified") + options = { + 'sequence_number_action': sequence_number_action, + 'timeout': kwargs.pop('timeout', None), + 'blob_sequence_number': sequence_number, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _resize_blob_options(size: int, **kwargs: Any) -> Dict[str, Any]: + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if size is None: + raise ValueError("A content length must be specified for a Page Blob.") + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'blob_content_length': size, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _upload_page_options( + page: bytes, + offset: int, + length: int, + **kwargs: Any +) -> Dict[str, Any]: + if isinstance(page, str): + page = page.encode(kwargs.pop('encoding', 'UTF-8')) + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + end_range = offset + length - 1 # Reformat to an inclusive range index + content_range = f'bytes={offset}-{end_range}' # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + validate_content = kwargs.pop('validate_content', False) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': page[:length], + 'content_length': length, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'range': content_range, + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _upload_pages_from_url_options( + source_url: str, + offset: int, + length: int, + source_offset: int, + **kwargs: Any +) -> Dict[str, Any]: + source_url = _encode_source_url(source_url=source_url) + # TODO: extract the code to a method format_range + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + if 
source_offset is None or source_offset % 512 != 0:
+        raise ValueError("source_offset must be an integer that aligns with 512 page size")
+
+    # Format range
+    end_range = offset + length - 1
+    destination_range = f'bytes={offset}-{end_range}'
+    source_range = f'bytes={source_offset}-{source_offset + length - 1}'  # end of range is inclusive, hence the -1
+
+    seq_conditions = SequenceNumberAccessConditions(
+        if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None),
+        if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None),
+        if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None)
+    )
+    source_authorization = kwargs.pop('source_authorization', None)
+    access_conditions = get_access_conditions(kwargs.pop('lease', None))
+    mod_conditions = get_modify_conditions(kwargs)
+    source_mod_conditions = get_source_conditions(kwargs)
+    cpk_scope_info = get_cpk_scope_info(kwargs)
+    source_content_md5 = kwargs.pop('source_content_md5', None)
+    cpk = kwargs.pop('cpk', None)
+    cpk_info = None
+    if cpk:
+        cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
+                           encryption_algorithm=cpk.algorithm)
+
+    options = {
+        'copy_source_authorization': source_authorization,
+        'source_url': source_url,
+        'content_length': 0,
+        'source_range': source_range,
+        'range': destination_range,
+        'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None,
+        'timeout': kwargs.pop('timeout', None),
+        'lease_access_conditions': access_conditions,
+        'sequence_number_access_conditions': seq_conditions,
+        'modified_access_conditions': mod_conditions,
+        'source_modified_access_conditions': source_mod_conditions,
+        'cpk_scope_info': cpk_scope_info,
+        'cpk_info': cpk_info,
+        'cls': return_response_headers}
+    options.update(kwargs)
+    return options
+
+def _clear_page_options(
+    offset: int,
+    length: int,
+    **kwargs: Any
+) -> Dict[str, Any]:
+    access_conditions = get_access_conditions(kwargs.pop('lease', None))
+    seq_conditions = SequenceNumberAccessConditions(
+        if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None),
+        if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None),
+        if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None)
+    )
+    mod_conditions = get_modify_conditions(kwargs)
+    if offset is None or offset % 512 != 0:
+        raise ValueError("offset must be an integer that aligns with 512 page size")
+    if length is None or length % 512 != 0:
+        raise ValueError("length must be an integer that aligns with 512 page size")
+    end_range = length + offset - 1  # Reformat to an inclusive range index
+    content_range = f'bytes={offset}-{end_range}'
+
+    cpk = kwargs.pop('cpk', None)
+    cpk_info = None
+    if cpk:
+        cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
+                           encryption_algorithm=cpk.algorithm)
+
+    options = {
+        'content_length': 0,
+        'timeout': kwargs.pop('timeout', None),
+        'range': content_range,
+        'lease_access_conditions': access_conditions,
+        'sequence_number_access_conditions': seq_conditions,
+        'modified_access_conditions': mod_conditions,
+        'cpk_info': cpk_info,
+        'cls': return_response_headers}
+    options.update(kwargs)
+    return options
+
+def _append_block_options(
+    data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
+    length: Optional[int] = None,
+    **kwargs: Any
+) -> Dict[str, Any]:
+    if isinstance(data, str):
+        data = data.encode(kwargs.pop('encoding', 'UTF-8'))
+    if length is None:
+        length = get_length(data)
+        if length is None:
+            length, data = 
read_length(data) + if length == 0: + return {} + if isinstance(data, bytes): + data = data[:length] + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + validate_content = kwargs.pop('validate_content', False) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': data, + 'content_length': length, + 'timeout': kwargs.pop('timeout', None), + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _append_block_from_url_options( + copy_source_url: str, + source_offset: Optional[int] = None, + source_length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: + copy_source_url = _encode_source_url(source_url=copy_source_url) + # If end range is provided, start range must be provided + if source_length is not None and source_offset is None: + raise ValueError("source_offset should also be specified if source_length is specified") + # Format based on whether length is present + source_range = None + if source_length is not None and source_offset is not None: + end_range = source_offset + source_length - 1 + source_range = f'bytes={source_offset}-{end_range}' + elif source_offset is not None: + source_range = f"bytes={source_offset}-" + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + source_content_md5 = kwargs.pop('source_content_md5', None) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + source_authorization = kwargs.pop('source_authorization', None) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + source_mod_conditions = get_source_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'source_url': copy_source_url, + 'content_length': 0, + 'source_range': source_range, + 'source_content_md5': source_content_md5, + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'source_modified_access_conditions': source_mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'timeout': kwargs.pop('timeout', None)} + 
options.update(kwargs) + return options + +def _seal_append_blob_options(**kwargs: Any) -> Dict[str, Any]: + appendpos_condition = kwargs.pop('appendpos_condition', None) + append_conditions = None + if appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + +def _from_blob_url( + blob_url: str, + snapshot: Optional[Union[BlobProperties, str, Dict[str, Any]]] +) -> Tuple[str, str, str, Optional[str]]: + try: + if not blob_url.lower().startswith('http'): + blob_url = "https://" + blob_url + except AttributeError as exc: + raise ValueError("Blob URL must be a string.") from exc + parsed_url = urlparse(blob_url.rstrip('/')) + + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {blob_url}") + + account_path = "" + if ".core." in parsed_url.netloc: + # ".core." indicates a non-customized URL; a blob name containing directory info can also be parsed. + path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=1) + elif "localhost" in parsed_url.netloc or "127.0.0.1" in parsed_url.netloc: + path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=2) + account_path += '/' + path_blob[0] + else: + # For a customized URL, a blob name that contains directory info cannot be parsed. + path_blob = parsed_url.path.lstrip('/').split('/') + if len(path_blob) > 2: + account_path = "/" + "/".join(path_blob[:-2]) + + account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" + + msg_invalid_url = "Invalid URL. Provide a blob_url with a valid blob and container name." + if len(path_blob) <= 1: + raise ValueError(msg_invalid_url) + container_name, blob_name = unquote(path_blob[-2]), unquote(path_blob[-1]) + if not container_name or not blob_name: + raise ValueError(msg_invalid_url) + + path_snapshot, _ = parse_query(parsed_url.query) + if snapshot: + if isinstance(snapshot, BlobProperties): + path_snapshot = snapshot.snapshot + elif isinstance(snapshot, dict): + path_snapshot = snapshot['snapshot'] + else: + path_snapshot = snapshot + return (account_url, container_name, blob_name, path_snapshot) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client.py index 8658363f8248..f6e17cb756f0 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client.py @@ -3,56 +3,55 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, +import warnings +from typing import ( + Any, Dict, List, Optional, Union, TYPE_CHECKING ) +from typing_extensions import Self - -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse # type: ignore - -from azure.core.paging import ItemPaged from azure.core.exceptions import HttpResponseError +from azure.core.paging import ItemPaged from azure.core.pipeline import Pipeline from azure.core.tracing.decorator import distributed_trace - -from ._shared.models import LocationMode -from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query -from ._shared.parser import _to_utc_datetime -from ._shared.response_handlers import return_response_headers, process_storage_error, \ - parse_to_internal_user_delegation_key -from ._generated import AzureBlobStorage -from ._generated.models import StorageServiceProperties, KeyInfo -from ._container_client import ContainerClient from ._blob_client import BlobClient -from ._models import ContainerPropertiesPaged +from ._blob_service_client_helpers import _parse_url +from ._container_client import ContainerClient +from ._deserialize import service_properties_deserialize, service_stats_deserialize +from ._encryption import StorageEncryptionMixin +from ._generated import AzureBlobStorage +from ._generated.models import KeyInfo, StorageServiceProperties from ._list_blobs_helper import FilteredBlobPaged +from ._models import BlobProperties, ContainerProperties, ContainerPropertiesPaged, CorsRule from ._serialize import get_api_version -from ._deserialize import service_stats_deserialize, service_properties_deserialize +from ._shared.base_client import parse_connection_str, parse_query, StorageAccountHostsMixin, TransportWrapper +from ._shared.models import LocationMode +from ._shared.parser import _to_utc_datetime +from ._shared.response_handlers import ( + parse_to_internal_user_delegation_key, + process_storage_error, + return_response_headers +) if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential from datetime import datetime - from ._shared.models import UserDelegationKey from ._lease import BlobLeaseClient from ._models import ( - ContainerProperties, - BlobProperties, - PublicAccess, BlobAnalyticsLogging, + FilteredBlob, Metrics, - CorsRule, + PublicAccess, RetentionPolicy, - StaticWebsite, - FilteredBlob + StaticWebsite ) + from ._shared.models import UserDelegationKey -class BlobServiceClient(StorageAccountHostsMixin): +class BlobServiceClient(StorageAccountHostsMixin, StorageEncryptionMixin): """A client to interact with the Blob Service at the account level. This client provides operations to retrieve and configure the account properties @@ -60,6 +59,10 @@ class BlobServiceClient(StorageAccountHostsMixin): For operations relating to a specific container or blob, clients for those entities can also be retrieved using the `get_client` functions. + For more optional configuration, please click + `here `__. + :param str account_url: The URL to the blob storage account. Any other entities included in the URL path (e.g. container or blob) will be discarded. 
This URL can be optionally @@ -67,13 +70,15 @@ class BlobServiceClient(StorageAccountHostsMixin): :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -92,6 +97,9 @@ class BlobServiceClient(StorageAccountHostsMixin): the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -107,43 +115,39 @@ class BlobServiceClient(StorageAccountHostsMixin): :end-before: [END create_blob_service_client_oauth] :language: python :dedent: 8 - :caption: Creating the BlobServiceClient with Azure Identity credentials. + :caption: Creating the BlobServiceClient with Default Azure Identity credentials. """ def __init__( - self, account_url, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) 
-> None - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Account URL must be a string.") - parsed_url = urlparse(account_url.rstrip('/')) - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(account_url)) - + self, account_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: + parsed_url, sas_token = _parse_url(account_url=account_url) _, sas_token = parse_query(parsed_url.query) self._query_str, credential = self._format_query_string(sas_token, credential) super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) def _format_url(self, hostname): """Format the endpoint URL according to the current location mode hostname. + + :param str hostname: + The hostname of the current location mode. + :returns: A formatted endpoint URL including current location mode hostname. + :rtype: str """ - return "{}://{}/{}".format(self.scheme, hostname, self._query_str) + return f"{self.scheme}://{hostname}/{self._query_str}" @classmethod def from_connection_string( - cls, conn_str, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): # type: (...) -> BlobServiceClient + cls, conn_str: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create BlobServiceClient from a Connection String. :param str conn_str: @@ -152,9 +156,19 @@ def from_connection_string( The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A Blob service client. 
:rtype: ~azure.storage.blob.BlobServiceClient @@ -173,11 +187,11 @@ def from_connection_string( return cls(account_url, credential=credential, **kwargs) @distributed_trace - def get_user_delegation_key(self, key_start_time, # type: datetime - key_expiry_time, # type: datetime - **kwargs # type: Any - ): - # type: (...) -> UserDelegationKey + def get_user_delegation_key( + self, key_start_time: "datetime", + key_expiry_time: "datetime", + **kwargs: Any + ) -> "UserDelegationKey": """ Obtain a user delegation key for the purpose of signing SAS tokens. A token credential must be present on the service object for this request to succeed. @@ -187,8 +201,12 @@ def get_user_delegation_key(self, key_start_time, # type: datetime :param ~datetime.datetime key_expiry_time: A DateTime value. Indicates when the key stops being valid. :keyword int timeout: - The timeout parameter is expressed in seconds. - :return: The user delegation key. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: The user delegation key. :rtype: ~azure.storage.blob.UserDelegationKey """ key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) @@ -203,8 +221,7 @@ def get_user_delegation_key(self, key_start_time, # type: datetime return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore @distributed_trace - def get_account_information(self, **kwargs): - # type: (Any) -> Dict[str, str] + def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account. The information can also be retrieved if the user has a SAS to a container or blob. @@ -228,8 +245,7 @@ def get_account_information(self, **kwargs): process_storage_error(error) @distributed_trace - def get_service_stats(self, **kwargs): - # type: (**Any) -> Dict[str, Any] + def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: """Retrieves statistics related to replication for the Blob service. It is only available when read-access geo-redundant replication is enabled for @@ -249,8 +265,12 @@ def get_service_stats(self, **kwargs): replication is enabled for your storage account. :keyword int timeout: - The timeout parameter is expressed in seconds. - :return: The blob service stats. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: The blob service stats. :rtype: Dict[str, Any] .. admonition:: Example: @@ -271,13 +291,16 @@ def get_service_stats(self, **kwargs): process_storage_error(error) @distributed_trace - def get_service_properties(self, **kwargs): - # type: (Any) -> Dict[str, Any] + def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: """Gets the properties of a storage account's Blob service, including Azure Storage Analytics. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
+ This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: An object containing blob service properties such as analytics logging, hour/minute metrics, cors rules, etc. :rtype: Dict[str, Any] @@ -300,16 +323,15 @@ def get_service_properties(self, **kwargs): @distributed_trace def set_service_properties( - self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging] - hour_metrics=None, # type: Optional[Metrics] - minute_metrics=None, # type: Optional[Metrics] - cors=None, # type: Optional[List[CorsRule]] - target_version=None, # type: Optional[str] - delete_retention_policy=None, # type: Optional[RetentionPolicy] - static_website=None, # type: Optional[StaticWebsite] - **kwargs - ): - # type: (...) -> None + self, analytics_logging: Optional["BlobAnalyticsLogging"] = None, + hour_metrics: Optional["Metrics"] = None, + minute_metrics: Optional["Metrics"] = None, + cors: Optional[List[CorsRule]] = None, + target_version: Optional[str] = None, + delete_retention_policy: Optional["RetentionPolicy"] = None, + static_website: Optional["StaticWebsite"] = None, + **kwargs: Any + ) -> None: """Sets the properties of a storage account's Blob service, including Azure Storage Analytics. @@ -344,7 +366,11 @@ def set_service_properties( and if yes, indicates the index document and 404 error document to use. :type static_website: ~azure.storage.blob.StaticWebsite :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -365,7 +391,7 @@ def set_service_properties( logging=analytics_logging, hour_metrics=hour_metrics, minute_metrics=minute_metrics, - cors=cors, + cors=CorsRule._to_generated(cors), # pylint: disable=protected-access default_service_version=target_version, delete_retention_policy=delete_retention_policy, static_website=static_website @@ -378,11 +404,10 @@ def set_service_properties( @distributed_trace def list_containers( - self, name_starts_with=None, # type: Optional[str] - include_metadata=False, # type: Optional[bool] - **kwargs - ): - # type: (...) -> ItemPaged[ContainerProperties] + self, name_starts_with: Optional[str] = None, + include_metadata: bool = False, + **kwargs: Any + ) -> ItemPaged[ContainerProperties]: """Returns a generator to list the containers under the specified account. The generator will lazily follow the continuation tokens returned by @@ -398,11 +423,18 @@ def list_containers( the service. :param str name_starts_with: Filters the results to return only containers whose names begin with the specified prefix. :param bool include_metadata: Specifies that container metadata is to be returned in the response. The default value is `False`. :keyword bool include_deleted: Specifies that deleted containers are to be returned in the response. This is for container restore enabled accounts. The default value is `False`. .. versionadded:: 12.4.0 + :keyword bool include_system: + Flag specifying that system containers should be included. + .. versionadded:: 12.10.0 :keyword int results_per_page: The maximum number of container names to retrieve per API call. If the request does not specify, the server will return up to 5,000 items. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. 
To configure client-side network timeouts + see `here `__. :returns: An iterable (auto-paging) of ContainerProperties. :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.ContainerProperties] @@ -419,6 +451,9 @@ def list_containers( include_deleted = kwargs.pop('include_deleted', None) if include_deleted: include.append("deleted") + include_system = kwargs.pop('include_system', None) + if include_system: + include.append("system") timeout = kwargs.pop('timeout', None) results_per_page = kwargs.pop('results_per_page', None) @@ -436,8 +471,7 @@ def list_containers( ) @distributed_trace - def find_blobs_by_tags(self, filter_expression, **kwargs): - # type: (str, **Any) -> ItemPaged[FilteredBlob] + def find_blobs_by_tags(self, filter_expression: str, **kwargs: Any) -> ItemPaged["FilteredBlob"]: """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be @@ -450,7 +484,11 @@ def find_blobs_by_tags(self, filter_expression, **kwargs): :keyword int results_per_page: The maximum number of results per page when paginating. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob] """ @@ -468,12 +506,11 @@ def find_blobs_by_tags(self, filter_expression, **kwargs): @distributed_trace def create_container( - self, name, # type: str - metadata=None, # type: Optional[Dict[str, str]] - public_access=None, # type: Optional[Union[PublicAccess, str]] - **kwargs - ): - # type: (...) -> ContainerClient + self, name: str, + metadata: Optional[Dict[str, str]] = None, + public_access: Optional[Union["PublicAccess", str]] = None, + **kwargs: Any + ) -> ContainerClient: """Creates a new container under the specified account. If the container with the same name already exists, a ResourceExistsError will @@ -496,7 +533,12 @@ def create_container( :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: A container client to interact with the newly created container. :rtype: ~azure.storage.blob.ContainerClient .. admonition:: Example: @@ -517,11 +559,10 @@ def create_container( @distributed_trace def delete_container( - self, container, # type: Union[ContainerProperties, str] - lease=None, # type: Optional[Union[BlobLeaseClient, str]] - **kwargs - ): - # type: (...) -> None + self, container: Union[ContainerProperties, str], + lease: Optional[Union["BlobLeaseClient", str]] = None, + **kwargs: Any + ) -> None: """Marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection.
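
For orientation, a minimal usage sketch of the service-level calls whose docstrings change in this hunk, written against the public azure-storage-blob package that this vendored copy mirrors. The environment variable and container name below are assumptions for illustration only, not part of the patch:

import os

from azure.storage.blob import BlobServiceClient

# Assumed: a connection string supplied via the environment.
conn_str = os.environ["AZURE_STORAGE_CONNECTION_STRING"]
service = BlobServiceClient.from_connection_string(conn_str)

# include_system=True (added in 12.10.0 per the docstring above) also
# surfaces system containers such as $logs.
for container in service.list_containers(include_metadata=True, include_system=True):
    print(container.name, container.metadata)

service.create_container("example-container")
service.delete_container("example-container")
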
@@ -535,7 +576,7 @@ def delete_container( If specified, delete_container only succeeds if the container's lease is active and matches this ID. Required if the container has an active lease. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :type lease: ~azure.storage.blob.BlobLeaseClient or str :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. :keyword ~datetime.datetime if_unmodified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource has changed, and act according to the condition specified by the `match_condition` parameter. :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. .. admonition:: Example: .. literalinclude:: ../samples/blob_samples_service.py :start-after: [START bsc_delete_container] :end-before: [END bsc_delete_container] :language: python :dedent: 12 :caption: Deleting a container in the blob service. """ - container = self.get_container_client(container) # type: ignore + container_client = self.get_container_client(container) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) - container.delete_container( # type: ignore + container_client.delete_container( lease=lease, timeout=timeout, **kwargs) @distributed_trace - def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs): - # type: (str, str, str, **Any) -> ContainerClient + def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> ContainerClient: + """Renames a container. + + Operation is successful only if the source container exists. + + :param str name: + The name of the container to rename. + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: A container client for the renamed container. + :rtype: ~azure.storage.blob.ContainerClient + """ + renamed_container = self.get_container_client(new_name) + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def undelete_container( + self, deleted_container_name: str, + deleted_container_version: str, + **kwargs: Any + ) -> ContainerClient: """Restores soft-deleted container. Operation will only be successful if used within the specified number of days @@ -589,14 +671,18 @@ def undelete_container(self, deleted_container_name, deleted_container_version, Specifies the name of the deleted container to restore. :param str deleted_container_version: Specifies the version of the deleted container to restore. 
- :keyword str new_name: - The new name for the deleted container to be restored to. - If not specified deleted_container_name will be used as the restored container name. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: The undeleted ContainerClient. :rtype: ~azure.storage.blob.ContainerClient """ new_name = kwargs.pop('new_name', None) + if new_name: + warnings.warn("`new_name` is no longer supported.", DeprecationWarning) container = self.get_container_client(new_name or deleted_container_name) try: container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access @@ -606,8 +692,7 @@ def undelete_container(self, deleted_container_name, deleted_container_version, except HttpResponseError as error: process_storage_error(error) - def get_container_client(self, container): - # type: (Union[ContainerProperties, str]) -> ContainerClient + def get_container_client(self, container: Union[ContainerProperties, str]) -> ContainerClient: """Get a client to interact with the specified container. The container need not already exist. @@ -628,9 +713,9 @@ def get_container_client(self, container): :dedent: 8 :caption: Getting the container client to interact with a specific container. """ - try: + if isinstance(container, ContainerProperties): container_name = container.name - except AttributeError: + else: container_name = container _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access @@ -640,15 +725,16 @@ def get_container_client(self, container): self.url, container_name=container_name, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) def get_blob_client( - self, container, # type: Union[ContainerProperties, str] - blob, # type: Union[BlobProperties, str] - snapshot=None # type: Optional[Union[Dict[str, Any], str]] - ): - # type: (...) -> BlobClient + self, container: Union[ContainerProperties, str], + blob: str, + snapshot: Optional[Union[Dict[str, Any], str]] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. :param container: The container that the blob is in. This can either be the name of the container, or an instance of ContainerProperties. :type container: str or ~azure.storage.blob.ContainerProperties - :param blob: - The blob with which to interact. This can either be the name of the blob, - or an instance of BlobProperties. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The name of the blob with which to interact. :param snapshot: The optional blob snapshot on which to operate. 
This can either be the ID of the snapshot, or a dictionary output returned by :func:`~azure.storage.blob.BlobClient.create_snapshot()`. :type snapshot: str or dict(str, Any) + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. :returns: A BlobClient. :rtype: ~azure.storage.blob.BlobClient @@ -677,21 +762,27 @@ def get_blob_client( :dedent: 12 :caption: Getting the blob client to interact with a specific blob. """ - try: - container_name = container.name - except AttributeError: - container_name = container - try: + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_name = blob.name - except AttributeError: + else: blob_name = blob + if isinstance(container, ContainerProperties): + container_name = container.name + else: + container_name = container _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access policies=self._pipeline._impl_policies # pylint: disable = protected-access ) - return BlobClient( # type: ignore + return BlobClient( self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client_helpers.py new file mode 100644 index 000000000000..d2de950b7c83 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_blob_service_client_helpers.py @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +from typing import Any, Tuple, TYPE_CHECKING +from urllib.parse import urlparse +from ._shared.base_client import parse_query + +if TYPE_CHECKING: + from urllib.parse import ParseResult + + +def _parse_url(account_url: str) -> Tuple["ParseResult", Any]: + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError as exc: + raise ValueError("Account URL must be a string.") from exc + parsed_url = urlparse(account_url.rstrip('/')) + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {account_url}") + + _, sas_token = parse_query(parsed_url.query) + + return parsed_url, sas_token diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client.py index 5bc4835b0c3b..783df6bc753e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client.py @@ -1,81 +1,82 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, AnyStr, Dict, List, Tuple, IO, Iterator, +import warnings +from datetime import datetime +from typing import ( + Any, AnyStr, cast, Dict, List, IO, Iterable, Iterator, Optional, overload, Union, TYPE_CHECKING ) +from urllib.parse import unquote, urlparse +from typing_extensions import Self - -try: - from urllib.parse import urlparse, quote, unquote -except ImportError: - from urlparse import urlparse # type: ignore - from urllib2 import quote, unquote # type: ignore - -import six - -from azure.core import MatchConditions -from azure.core.exceptions import HttpResponseError +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError from azure.core.paging import ItemPaged -from azure.core.tracing.decorator import distributed_trace from azure.core.pipeline import Pipeline -from azure.core.pipeline.transport import HttpRequest - -from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query -from ._shared.request_handlers import add_metadata_headers, serialize_iso -from ._shared.response_handlers import ( - process_storage_error, - return_response_headers, - return_headers_and_deserialized) +from azure.core.tracing.decorator import distributed_trace +from ._blob_client import BlobClient +from ._container_client_helpers import ( + _format_url, + _generate_delete_blobs_options, + _generate_set_tiers_options, + _parse_url +) +from ._deserialize import deserialize_container_properties +from ._download import StorageStreamDownloader +from ._encryption import StorageEncryptionMixin from ._generated import AzureBlobStorage from ._generated.models import SignedIdentifier -from 
._deserialize import deserialize_container_properties -from ._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions -from ._models import ( # pylint: disable=unused-import - ContainerProperties, - BlobProperties, - BlobType) -from ._list_blobs_helper import BlobPrefix, BlobPropertiesPaged from ._lease import BlobLeaseClient -from ._blob_client import BlobClient +from ._list_blobs_helper import ( + BlobNamesPaged, + BlobPrefix, + BlobPropertiesPaged, + FilteredBlobPaged, + IgnoreListBlobsDeserializer +) +from ._models import ( + BlobProperties, + BlobType, + ContainerProperties, + FilteredBlob +) +from ._serialize import get_access_conditions, get_api_version, get_container_cpk_scope_info, get_modify_conditions +from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper +from ._shared.request_handlers import add_metadata_headers, serialize_iso +from ._shared.response_handlers import ( + process_storage_error, + return_headers_and_deserialized, + return_response_headers +) if TYPE_CHECKING: - from azure.core.pipeline.transport import HttpTransport, HttpResponse # pylint: disable=ungrouped-imports - from azure.core.pipeline.policies import HTTPPolicy # pylint: disable=ungrouped-imports - from datetime import datetime - from ._models import ( # pylint: disable=unused-import - PublicAccess, + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from azure.core.pipeline.transport import HttpResponse # pylint: disable=C4756 + from azure.storage.blob import BlobServiceClient + from ._models import ( AccessPolicy, - ContentSettings, - StandardBlobTier, - PremiumPageBlobTier) - - -def _get_blob_name(blob): - """Return the blob name. - - :param blob: A blob string or BlobProperties - :rtype: str - """ - try: - return blob.get('name') - except AttributeError: - return blob + PremiumPageBlobTier, + PublicAccess, + StandardBlobTier + ) -class ContainerClient(StorageAccountHostsMixin): +class ContainerClient(StorageAccountHostsMixin, StorageEncryptionMixin): # pylint: disable=too-many-public-methods """A client to interact with a specific container, although that container may not yet exist. For operations relating to a specific blob within this container, a blob client can be retrieved using the :func:`~get_blob_client` function. + For more optional configuration, please click + `here `__. + :param str account_url: The URI to the storage account. In order to create a client given the full URI to the container, use the :func:`from_container_url` classmethod. @@ -85,13 +86,15 @@ class ContainerClient(StorageAccountHostsMixin): :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. 
+ If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -110,6 +113,9 @@ class ContainerClient(StorageAccountHostsMixin): the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -128,44 +134,41 @@ class ContainerClient(StorageAccountHostsMixin): :caption: Creating the container client directly. """ def __init__( - self, account_url, # type: str - container_name, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None - try: - if not account_url.lower().startswith('http'): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Container URL must be a string.") - parsed_url = urlparse(account_url.rstrip('/')) - if not container_name: - raise ValueError("Please specify a container name.") - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(account_url)) + self, account_url: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: + parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name) - _, sas_token = parse_query(parsed_url.query) self.container_name = container_name + # This parameter is used for the hierarchy traversal. Give precedence to credential. 
+ self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential) super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access + self._api_version = get_api_version(kwargs) + self._client = self._build_generated_client() + self._configure_encryption(kwargs) + + def _build_generated_client(self) -> AzureBlobStorage: + client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access + return client def _format_url(self, hostname): - container_name = self.container_name - if isinstance(container_name, six.text_type): - container_name = container_name.encode('UTF-8') - return "{}://{}/{}{}".format( - self.scheme, - hostname, - quote(container_name), - self._query_str) + return _format_url( + container_name=self.container_name, + hostname=hostname, + scheme=self.scheme, + query_str=self._query_str + ) @classmethod - def from_container_url(cls, container_url, credential=None, **kwargs): - # type: (str, Optional[Any], Any) -> ContainerClient + def from_container_url( + cls, container_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create ContainerClient from a container url. :param str container_url: @@ -176,31 +179,37 @@ def from_container_url(cls, container_url, credential=None, **kwargs): The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A container client. 
:rtype: ~azure.storage.blob.ContainerClient """ try: if not container_url.lower().startswith('http'): container_url = "https://" + container_url - except AttributeError: - raise ValueError("Container URL must be a string.") - parsed_url = urlparse(container_url.rstrip('/')) + except AttributeError as exc: + raise ValueError("Container URL must be a string.") from exc + parsed_url = urlparse(container_url) if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(container_url)) + raise ValueError(f"Invalid URL: {container_url}") - container_path = parsed_url.path.lstrip('/').split('/') + container_path = parsed_url.path.strip('/').split('/') account_path = "" if len(container_path) > 1: account_path = "/" + "/".join(container_path[:-1]) - account_url = "{}://{}{}?{}".format( - parsed_url.scheme, - parsed_url.netloc.rstrip('/'), - account_path, - parsed_url.query) + account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" container_name = unquote(container_path[-1]) if not container_name: raise ValueError("Invalid URL. Please provide a URL with a valid container name") @@ -208,11 +217,11 @@ def from_container_url(cls, container_url, credential=None, **kwargs): @classmethod def from_connection_string( - cls, conn_str, # type: str - container_name, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): # type: (...) -> ContainerClient + cls, conn_str: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: """Create ContainerClient from a Connection String. :param str conn_str: @@ -224,9 +233,19 @@ def from_connection_string( The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account shared access - key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. :returns: A container client. 
:rtype: ~azure.storage.blob.ContainerClient @@ -246,8 +265,11 @@ def from_connection_string( account_url, container_name=container_name, credential=credential, **kwargs) @distributed_trace - def create_container(self, metadata=None, public_access=None, **kwargs): - # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> None + def create_container( + self, metadata: Optional[Dict[str, str]] = None, + public_access: Optional[Union["PublicAccess", str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, "datetime"]]: """ Creates a new container under the specified account. If the container with the same name already exists, the operation fails. @@ -266,8 +288,13 @@ def create_container(self, metadata=None, public_access=None, **kwargs): :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: A dictionary of response headers. + :rtype: Dict[str, Union[str, datetime]] .. admonition:: Example: @@ -294,9 +321,45 @@ def create_container(self, metadata=None, public_access=None, **kwargs): process_storage_error(error) @distributed_trace - def delete_container( - self, **kwargs): - # type: (Any) -> None + def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": + """Renames a container. + + Operation is successful only if the source container exists. + + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :type lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: The renamed container client. + :rtype: ~azure.storage.blob.ContainerClient + """ + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container = ContainerClient( + f"{self.scheme}://{self.primary_hostname}", container_name=new_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) + renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def delete_container(self, **kwargs: Any) -> None: """ Marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection.
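
As a quick illustration of the return-type change above (create_container now surfaces the response-header dictionary instead of None), a hedged sketch against the public package; `conn_str` and the container name are placeholders assumed to be defined:

from azure.storage.blob import ContainerClient

client = ContainerClient.from_connection_string(conn_str, "example-container")  # conn_str assumed defined
headers = client.create_container(metadata={"purpose": "checkpointing"})
# The normalized response headers typically include the etag and last-modified time.
print(headers.get("etag"), headers.get("last_modified"))
client.delete_container()
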
@@ -324,7 +387,11 @@ def delete_container( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -351,10 +418,10 @@ def delete_container( @distributed_trace def acquire_lease( - self, lease_duration=-1, # type: int - lease_id=None, # type: Optional[str] - **kwargs): - # type: (...) -> BlobLeaseClient + self, lease_duration: int = -1, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> BlobLeaseClient: """ Requests a new lease. If the container does not have an active lease, the Blob service creates a lease on the container and returns a new @@ -386,7 +453,11 @@ def acquire_lease( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: A BlobLeaseClient object that can be run in a context manager. :rtype: ~azure.storage.blob.BlobLeaseClient @@ -406,8 +477,7 @@ def acquire_lease( return lease @distributed_trace - def get_account_information(self, **kwargs): - # type: (**Any) -> Dict[str, str] + def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account. The information can also be retrieved if the user has a SAS to a container or blob. @@ -422,8 +492,7 @@ def get_account_information(self, **kwargs): process_storage_error(error) @distributed_trace - def get_container_properties(self, **kwargs): - # type: (Any) -> ContainerProperties + def get_container_properties(self, **kwargs: Any) -> ContainerProperties: """Returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. @@ -432,7 +501,11 @@ def get_container_properties(self, **kwargs): container's lease is active and matches this ID. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: Properties for the specified container within a container object. :rtype: ~azure.storage.blob.ContainerProperties @@ -460,11 +533,33 @@ def get_container_properties(self, **kwargs): return response # type: ignore @distributed_trace - def set_container_metadata( # type: ignore - self, metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + def exists(self, **kwargs: Any) -> bool: + """ + Returns True if a container exists and returns False otherwise. 
+
+        :keyword int timeout:
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
+        :returns: boolean
+        :rtype: bool
+        """
+        try:
+            self._client.container.get_properties(**kwargs)
+            return True
+        except HttpResponseError as error:
+            try:
+                process_storage_error(error)
+            except ResourceNotFoundError:
+                return False
+
+    @distributed_trace
+    def set_container_metadata(
+        self, metadata: Optional[Dict[str, str]] = None,
+        **kwargs: Any
+    ) -> Dict[str, Union[str, "datetime"]]:
        """Sets one or more user-defined name-value pairs for the specified
        container. Each call to this operation replaces all existing metadata
        attached to the container. To remove all metadata from the container,
@@ -494,7 +589,11 @@ def set_container_metadata(  # type: ignore
            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
            and act according to the condition specified by the `match_condition` parameter.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: Container-updated property dict (Etag and last modified).
        :rtype: dict[str, str or datetime]
@@ -525,8 +624,40 @@ def set_container_metadata(  # type: ignore
            process_storage_error(error)

    @distributed_trace
-    def get_container_access_policy(self, **kwargs):
-        # type: (Any) -> Dict[str, Any]
+    def _get_blob_service_client(self) -> "BlobServiceClient":
+        """Get a client to interact with the container's parent service account.
+
+        Defaults to current container's credentials.
+
+        :returns: A BlobServiceClient.
+        :rtype: ~azure.storage.blob.BlobServiceClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_service.py
+                :start-after: [START get_blob_service_client_from_container_client]
+                :end-before: [END get_blob_service_client_from_container_client]
+                :language: python
+                :dedent: 8
+                :caption: Get blob service client from container object.
+        """
+        from ._blob_service_client import BlobServiceClient
+        if not isinstance(self._pipeline._transport, TransportWrapper):  # pylint: disable = protected-access
+            _pipeline = Pipeline(
+                transport=TransportWrapper(self._pipeline._transport),  # pylint: disable = protected-access
+                policies=self._pipeline._impl_policies  # pylint: disable = protected-access
+            )
+        else:
+            _pipeline = self._pipeline
+        return BlobServiceClient(
+            f"{self.scheme}://{self.primary_hostname}",
+            credential=self._raw_credential, api_version=self.api_version, _configuration=self._config,
+            _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption,
+            encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key,
+            key_resolver_function=self.key_resolver_function, _pipeline=_pipeline)
+
+    @distributed_trace
+    def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]:
        """Gets the permissions for the specified container.
        The permissions indicate whether container data may be accessed publicly.
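And one tying together exists, set_container_metadata and get_container_access_policy as documented above (same placeholder client, public package):

    if container.exists():
        # Each call replaces the container's metadata wholesale.
        container.set_container_metadata(metadata={"category": "checkpoints"})
        policy = container.get_container_access_policy()
        print(policy["public_access"], policy["signed_identifiers"])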
@@ -535,7 +666,11 @@ def get_container_access_policy(self, **kwargs): container's lease is active and matches this ID. :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Access policy information in a dict. :rtype: dict[str, Any] @@ -566,10 +701,10 @@ def get_container_access_policy(self, **kwargs): @distributed_trace def set_container_access_policy( - self, signed_identifiers, # type: Dict[str, AccessPolicy] - public_access=None, # type: Optional[Union[str, PublicAccess]] - **kwargs - ): # type: (...) -> Dict[str, Union[str, datetime]] + self, signed_identifiers: Dict[str, "AccessPolicy"], + public_access: Optional[Union[str, "PublicAccess"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets the permissions for the specified container or stored access policies that may be used with Shared Access Signatures. The permissions indicate whether blobs in a container may be accessed publicly. @@ -598,7 +733,11 @@ def set_container_access_policy( Specify this header to perform the operation only if the resource has not been modified since the specified date/time. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Container-updated property dict (Etag and last modified). :rtype: dict[str, str or ~datetime.datetime] @@ -627,20 +766,23 @@ def set_container_access_policy( access_conditions = get_access_conditions(lease) timeout = kwargs.pop('timeout', None) try: - return self._client.container.set_access_policy( + return cast(Dict[str, Union[str, datetime]], self._client.container.set_access_policy( container_acl=signed_identifiers or None, timeout=timeout, access=public_access, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=return_response_headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def list_blobs(self, name_starts_with=None, include=None, **kwargs): - # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> ItemPaged[BlobProperties] + def list_blobs( + self, name_starts_with: Optional[str] = None, + include: Optional[Union[str, List[str]]] = None, + **kwargs: Any + ) -> ItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. @@ -648,11 +790,17 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :param str name_starts_with: Filters the results to return only blobs whose names begin with the specified prefix. - :param list[str] or str include: + :param include: Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'tags'. 
+ Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :type include: list[str] or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] @@ -665,6 +813,10 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :dedent: 8 :caption: List the blobs in the container. """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + if include and not isinstance(include, list): include = [include] @@ -676,17 +828,62 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): timeout=timeout, **kwargs) return ItemPaged( - command, prefix=name_starts_with, results_per_page=results_per_page, + command, prefix=name_starts_with, results_per_page=results_per_page, container=self.container_name, page_iterator_class=BlobPropertiesPaged) + @distributed_trace + def list_blob_names(self, **kwargs: Any) -> ItemPaged[str]: + """Returns a generator to list the names of blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. + + Note that no additional properties or metadata will be returned when using this API. + Additionally, this API does not have an option to include additional blobs such as snapshots, + versions, soft-deleted blobs, etc. To get any of this data, use :func:`list_blobs()`. + + :keyword str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: An iterable (auto-paging) response of blob names as strings. + :rtype: ~azure.core.paging.ItemPaged[str] + """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + + name_starts_with = kwargs.pop('name_starts_with', None) + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + + # For listing only names we need to create a one-off generated client and + # override its deserializer to prevent deserialization of the full response. 
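+        # (Swapping in IgnoreListBlobsDeserializer leaves the listing response
+        # un-deserialized, so the BlobNamesPaged pager below can extract just the
+        # blob names instead of building a full BlobProperties model per item.)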
+        client = self._build_generated_client()
+        client.container._deserialize = IgnoreListBlobsDeserializer()  # pylint: disable=protected-access
+
+        command = functools.partial(
+            client.container.list_blob_flat_segment,
+            timeout=timeout,
+            **kwargs)
+        return ItemPaged(
+            command,
+            prefix=name_starts_with,
+            results_per_page=results_per_page,
+            container=self.container_name,
+            page_iterator_class=BlobNamesPaged)
+
    @distributed_trace
    def walk_blobs(
-            self, name_starts_with=None,  # type: Optional[str]
-            include=None,  # type: Optional[Any]
-            delimiter="/",  # type: str
-            **kwargs  # type: Optional[Any]
-        ):
-        # type: (...) -> ItemPaged[BlobProperties]
+        self, name_starts_with: Optional[str] = None,
+        include: Optional[Union[List[str], str]] = None,
+        delimiter: str = "/",
+        **kwargs: Any
+    ) -> ItemPaged[BlobProperties]:
        """Returns a generator to list the blobs under the specified container.
        The generator will lazily follow the continuation tokens returned by
        the service. This operation will list blobs in accordance with a hierarchy,
@@ -695,19 +892,29 @@ def walk_blobs(
        :param str name_starts_with:
            Filters the results to return only blobs whose names
            begin with the specified prefix.
-        :param list[str] include:
+        :param include:
            Specifies one or more additional datasets to include in the response.
-            Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'.
+            Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions',
+            'tags', 'versions', 'immutabilitypolicy', 'legalhold'.
+        :type include: list[str] or str
        :param str delimiter:
            When the request includes this parameter, the operation returns a BlobPrefix
            element in the response body that acts as a placeholder for all blobs whose
            names begin with the same substring up to the appearance of the delimiter
            character. The delimiter may be a single character or a string.
        :keyword int timeout:
-            The timeout parameter is expressed in seconds.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
        :returns: An iterable (auto-paging) response of BlobProperties.
        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties]
        """
+        if kwargs.pop('prefix', None):
+            raise ValueError("Passing 'prefix' has no effect on filtering, " +
+                             "please use the 'name_starts_with' parameter instead.")
+
        if include and not isinstance(include, list):
            include = [include]

@@ -723,24 +930,58 @@ def walk_blobs(
            command,
            prefix=name_starts_with,
            results_per_page=results_per_page,
+            container=self.container_name,
            delimiter=delimiter)

+    @distributed_trace
+    def find_blobs_by_tags(
+        self, filter_expression: str,
+        **kwargs: Any
+    ) -> ItemPaged[FilteredBlob]:
+        """Returns a generator to list the blobs under the specified container whose tags
+        match the given search expression.
+        The generator will lazily follow the continuation tokens returned by
+        the service.
+
+        :param str filter_expression:
+            The expression to find blobs whose tags match the specified condition.
+            e.g. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
+        :keyword int results_per_page:
+            The max result per page when paginating.
+        :keyword int timeout:
+            Sets the server-side timeout for the operation in seconds.
For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__.
+        :returns: An iterable (auto-paging) response of FilteredBlob.
+        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob]
+        """
+        results_per_page = kwargs.pop('results_per_page', None)
+        timeout = kwargs.pop('timeout', None)
+        command = functools.partial(
+            self._client.container.filter_blobs,
+            timeout=timeout,
+            where=filter_expression,
+            **kwargs)
+        return ItemPaged(
+            command, results_per_page=results_per_page, container=self.container_name,
+            page_iterator_class=FilteredBlobPaged)
+
    @distributed_trace
    def upload_blob(
-            self, name,  # type: Union[str, BlobProperties]
-            data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
-            blob_type=BlobType.BlockBlob,  # type: Union[str, BlobType]
-            length=None,  # type: Optional[int]
-            metadata=None,  # type: Optional[Dict[str, str]]
-            **kwargs
-        ):
-        # type: (...) -> BlobClient
+        self, name: str,
+        data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
+        blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB,
+        length: Optional[int] = None,
+        metadata: Optional[Dict[str, str]] = None,
+        **kwargs
+    ) -> BlobClient:
        """Creates a new blob from a data source with automatic chunking.

-        :param name: The blob with which to interact. If specified, this value will override
-            a blob value specified in the blob URL.
-        :type name: str or ~azure.storage.blob.BlobProperties
+        :param str name: The blob with which to interact.
        :param data: The blob data to upload.
+        :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]]
        :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
            either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
        :param int length:
@@ -795,9 +1036,12 @@ def upload_blob(
            .. versionadded:: 12.4.0

        :keyword int timeout:
-            The timeout parameter is expressed in seconds. This method may make
-            multiple calls to the Azure service and the timeout will apply to
-            each call individually.
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__. This method may make multiple calls to the service and
+            the timeout will apply to each call individually.
        :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
            A page blob tier value to set the blob to. The tier correlates to the size of the
            blob and number of allowed IOPS. This is only applicable to page blobs on
@@ -829,6 +1073,11 @@ def upload_blob(
        :keyword str encoding:
            Defaults to UTF-8.
+        :keyword progress_hook:
+            A callback to track the progress of a long running upload. The signature is
+            function(current: int, total: Optional[int]) where current is the number of bytes transferred
+            so far, and total is the size of the blob or None if the size is unknown.
+        :paramtype progress_hook: Callable[[int, Optional[int]], None]
        :returns: A BlobClient to interact with the newly uploaded blob.
        :rtype: ~azure.storage.blob.BlobClient
@@ -841,6 +1090,12 @@ def upload_blob(
                :dedent: 8
                :caption: Upload blob to the container.
        """
+        if isinstance(name, BlobProperties):
+            warnings.warn(
+                "The use of a 'BlobProperties' instance for param name is deprecated. 
" + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob = self.get_blob_client(name) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -858,11 +1113,10 @@ def upload_blob( @distributed_trace def delete_blob( - self, blob, # type: Union[str, BlobProperties] - delete_snapshots=None, # type: Optional[str] - **kwargs - ): - # type: (...) -> None + self, blob: str, + delete_snapshots: Optional[str] = None, + **kwargs: Any + ) -> None: """Marks the specified blob or snapshot for deletion. The blob is later deleted during garbage collection. @@ -874,11 +1128,9 @@ def delete_blob( and retains the blob or snapshot for specified number of days. After specified number of days, blob's data is removed from the service during garbage collection. Soft deleted blob or snapshot is accessible through :func:`list_blobs()` specifying `include=["deleted"]` - option. Soft-deleted blob or snapshot can be restored using :func:`~BlobClient.undelete()` + option. Soft-deleted blob or snapshot can be restored using :func:`~azure.storage.blob.BlobClient.undelete()` - :param blob: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The blob with which to interact. :param str delete_snapshots: Required if the blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. @@ -888,6 +1140,7 @@ def delete_blob( value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -918,9 +1171,19 @@ def delete_blob( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None """ + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -929,22 +1192,56 @@ def delete_blob( timeout=timeout, **kwargs) + @overload + def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... + @distributed_trace - def download_blob(self, blob, offset=None, length=None, **kwargs): - # type: (Union[str, BlobProperties], Optional[int], Optional[int], **Any) -> StorageStreamDownloader + def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. 
The readall() method must be used to read all the content or readinto() must be
        used to download the blob into
-        a stream.
+        a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks.

-        :param blob: The blob with which to interact. If specified, this value will override
-            a blob value specified in the blob URL.
-        :type blob: str or ~azure.storage.blob.BlobProperties
+        :param str blob: The blob with which to interact.
        :param int offset:
            Start of byte range to use for downloading a section of the blob.
            Must be set if length is provided.
        :param int length:
            Number of bytes to read from the stream. This is optional, but
            should be supplied for optimal performance.
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to download.
+
+            .. versionadded:: 12.4.0
+
+            This keyword argument was introduced in API version '2019-12-12'.
+
        :keyword bool validate_content:
            If true, calculates an MD5 hash for each chunk of the blob. The storage
            service checks the hash of the content that has arrived with the hash
@@ -991,136 +1288,42 @@ def download_blob(self, blob, offset=None, length=None, **kwargs):
            The number of parallel connections with which to download.
        :keyword str encoding:
            Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
+        :keyword progress_hook:
+            A callback to track the progress of a long running download. The signature is
+            function(current: int, total: int) where current is the number of bytes transferred
+            so far, and total is the total size of the download.
+        :paramtype progress_hook: Callable[[int, int], None]
        :keyword int timeout:
-            The timeout parameter is expressed in seconds. This method may make
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here `__. This method may make multiple calls to the service and
+            the timeout will apply to each call individually.
-            multiple calls to the Azure service and the timeout will apply to
-            each call individually.
        :returns: A streaming object (StorageStreamDownloader)
        :rtype: ~azure.storage.blob.StorageStreamDownloader
        """
+        if isinstance(blob, BlobProperties):
+            warnings.warn(
+                "The use of a 'BlobProperties' instance for param blob is deprecated. " +
+                "Please use 'BlobProperties.name' or any other str input type instead.",
+                DeprecationWarning
+            )
        blob_client = self.get_blob_client(blob)  # type: ignore
        kwargs.setdefault('merge_span', True)
-        return blob_client.download_blob(offset=offset, length=length, **kwargs)
-
-    def _generate_delete_blobs_subrequest_options(
-        self, snapshot=None,
-        delete_snapshots=None,
-        lease_access_conditions=None,
-        modified_access_conditions=None,
-        **kwargs
-    ):
-        """This code is a copy from _generated.
-
-        Once Autorest is able to provide request preparation this code should be removed.
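Since upload_blob, download_blob and delete_blob are the heart of this client, a compact round-trip sketch may help reviewers; names and the local file path are placeholders, and the public package stands in for the vendored one:

    # Upload with a progress callback, read it back, then delete including snapshots.
    with open("./event.bin", "rb") as source:
        container.upload_blob(
            name="event.bin",
            data=source,
            overwrite=True,
            progress_hook=lambda current, total: print(f"{current}/{total}"),
        )
    raw = container.download_blob("event.bin").readall()                     # bytes
    text = container.download_blob("event.bin", encoding="utf-8").readall()  # str overload
    container.delete_blob("event.bin", delete_snapshots="include")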
- """ - lease_id = None - if lease_access_conditions is not None: - lease_id = lease_access_conditions.lease_id - if_modified_since = None - if modified_access_conditions is not None: - if_modified_since = modified_access_conditions.if_modified_since - if_unmodified_since = None - if modified_access_conditions is not None: - if_unmodified_since = modified_access_conditions.if_unmodified_since - if_match = None - if modified_access_conditions is not None: - if_match = modified_access_conditions.if_match - if_none_match = None - if modified_access_conditions is not None: - if_none_match = modified_access_conditions.if_none_match - if_tags = None - if modified_access_conditions is not None: - if_tags = modified_access_conditions.if_tags - - # Construct parameters - timeout = kwargs.pop('timeout', None) - query_parameters = {} - if snapshot is not None: - query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access - if timeout is not None: - query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access - - # Construct headers - header_parameters = {} - if delete_snapshots is not None: - header_parameters['x-ms-delete-snapshots'] = self._client._serialize.header( # pylint: disable=protected-access - "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType') - if lease_id is not None: - header_parameters['x-ms-lease-id'] = self._client._serialize.header( # pylint: disable=protected-access - "lease_id", lease_id, 'str') - if if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._client._serialize.header( # pylint: disable=protected-access - "if_modified_since", if_modified_since, 'rfc-1123') - if if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._client._serialize.header( # pylint: disable=protected-access - "if_unmodified_since", if_unmodified_since, 'rfc-1123') - if if_match is not None: - header_parameters['If-Match'] = self._client._serialize.header( # pylint: disable=protected-access - "if_match", if_match, 'str') - if if_none_match is not None: - header_parameters['If-None-Match'] = self._client._serialize.header( # pylint: disable=protected-access - "if_none_match", if_none_match, 'str') - if if_tags is not None: - header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access - - return query_parameters, header_parameters - - def _generate_delete_blobs_options(self, - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - timeout = kwargs.pop('timeout', None) - raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) - delete_snapshots = kwargs.pop('delete_snapshots', None) - if_modified_since = kwargs.pop('if_modified_since', None) - if_unmodified_since = kwargs.pop('if_unmodified_since', None) - if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': self._query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "" - }) - - reqs = [] - for blob in blobs: - blob_name = _get_blob_name(blob) - container_name = self.container_name - - try: - options = BlobClient._generic_delete_blob_options( # pylint: disable=protected-access - snapshot=blob.get('snapshot'), - delete_snapshots=delete_snapshots or blob.get('delete_snapshots'), - lease=blob.get('lease_id'), - 
if_modified_since=if_modified_since or blob.get('if_modified_since'), - if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'), - etag=blob.get('etag'), - if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'), - match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag') - else None, - timeout=blob.get('timeout'), - ) - except AttributeError: - options = BlobClient._generic_delete_blob_options( # pylint: disable=protected-access - delete_snapshots=delete_snapshots, - if_modified_since=if_modified_since, - if_unmodified_since=if_unmodified_since, - if_tags_match_condition=if_tags_match_condition - ) - - query_parameters, header_parameters = self._generate_delete_blobs_subrequest_options(**options) - - req = HttpRequest( - "DELETE", - "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str), - headers=header_parameters - ) - req.format_parameters(query_parameters) - reqs.append(req) - - return reqs, kwargs + return blob_client.download_blob( + offset=offset, + length=length, + encoding=encoding, + **kwargs) @distributed_trace - def delete_blobs(self, *blobs, **kwargs): - # type: (...) -> Iterator[HttpResponse] + def delete_blobs( # pylint: disable=delete-operation-wrong-return-type + self, *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> Iterator["HttpResponse"]: """Marks the specified blobs or snapshots for deletion. The blobs are later deleted during garbage collection. @@ -1131,7 +1334,9 @@ def delete_blobs(self, *blobs, **kwargs): and retains the blobs or snapshots for specified number of days. After specified number of days, blobs' data is removed from the service during garbage collection. Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` - Soft-deleted blobs or snapshots can be restored using :func:`~BlobClient.undelete()` + Soft-deleted blobs or snapshots can be restored using :func:`~azure.storage.blob.BlobClient.undelete()` + + The maximum number of blobs that can be deleted in a single request is 256. :param blobs: The blobs to delete. This can be a single blob, or multiple values can @@ -1144,6 +1349,8 @@ def delete_blobs(self, *blobs, **kwargs): key: 'name', value type: str snapshot you want to delete: key: 'snapshot', value type: str + version id: + key: 'version_id', value type: str whether to delete snapshots when deleting blob: key: 'delete_snapshots', value: 'include' or 'only' if the blob modified or not: @@ -1159,7 +1366,7 @@ def delete_blobs(self, *blobs, **kwargs): timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: Union[str, Dict[str, Any], BlobProperties] :keyword str delete_snapshots: Required if a blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. @@ -1186,7 +1393,11 @@ def delete_blobs(self, *blobs, **kwargs): This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
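A sketch of the batch form documented here; each positional argument may be a blob name, a property dict, or a BlobProperties instance, and sub-responses come back in order (placeholder names again):

    responses = container.delete_blobs(
        "blob-1",
        {"name": "blob-2", "delete_snapshots": "include"},
        raise_on_any_failure=False,  # inspect sub-responses instead of raising
    )
    for sub_response in responses:
        print(sub_response.status_code)  # typically 202 per deleted blob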
:return: An iterator of responses, one for each blob in order :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] @@ -1200,111 +1411,33 @@ def delete_blobs(self, *blobs, **kwargs): :caption: Deleting multiple blobs. """ if len(blobs) == 0: - return iter(list()) - - reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs) + return iter([]) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + + reqs, options = _generate_delete_blobs_options( + self._query_str, + self.container_name, + self._client, + *blobs, + **kwargs + ) return self._batch_send(*reqs, **options) - def _generate_set_tiers_subrequest_options( - self, tier, snapshot=None, version_id=None, rehydrate_priority=None, lease_access_conditions=None, **kwargs - ): - """This code is a copy from _generated. - - Once Autorest is able to provide request preparation this code should be removed. - """ - if not tier: - raise ValueError("A blob tier must be specified") - if snapshot and version_id: - raise ValueError("Snapshot and version_id cannot be set at the same time") - if_tags = kwargs.pop('if_tags', None) - - lease_id = None - if lease_access_conditions is not None: - lease_id = lease_access_conditions.lease_id - - comp = "tier" - timeout = kwargs.pop('timeout', None) - # Construct parameters - query_parameters = {} - if snapshot is not None: - query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access - if version_id is not None: - query_parameters['versionid'] = self._client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access - if timeout is not None: - query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access - query_parameters['comp'] = self._client._serialize.query("comp", comp, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call - - # Construct headers - header_parameters = {} - header_parameters['x-ms-access-tier'] = self._client._serialize.header("tier", tier, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._client._serialize.header( # pylint: disable=protected-access - "rehydrate_priority", rehydrate_priority, 'str') - if lease_id is not None: - header_parameters['x-ms-lease-id'] = self._client._serialize.header("lease_id", lease_id, 'str') # pylint: disable=protected-access - if if_tags is not None: - header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access - - return query_parameters, header_parameters - - def _generate_set_tiers_options(self, - blob_tier, # type: Optional[Union[str, StandardBlobTier, PremiumPageBlobTier]] - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - timeout = kwargs.pop('timeout', None) - raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) - rehydrate_priority = kwargs.pop('rehydrate_priority', None) - if_tags = kwargs.pop('if_tags_match_condition', None) - kwargs.update({'raise_on_any_failure': raise_on_any_failure, - 'sas': self._query_str.replace('?', '&'), - 'timeout': '&timeout=' + str(timeout) if timeout else "" - }) - - reqs = [] - for blob in blobs: - blob_name = _get_blob_name(blob) - container_name = self.container_name - - try: - tier = blob_tier or blob.get('blob_tier') - query_parameters, header_parameters = 
self._generate_set_tiers_subrequest_options( - tier=tier, - snapshot=blob.get('snapshot'), - version_id=blob.get('version_id'), - rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'), - lease_access_conditions=blob.get('lease_id'), - if_tags=if_tags or blob.get('if_tags_match_condition'), - timeout=timeout or blob.get('timeout') - ) - except AttributeError: - query_parameters, header_parameters = self._generate_set_tiers_subrequest_options( - blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags) - - req = HttpRequest( - "PUT", - "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str), - headers=header_parameters - ) - req.format_parameters(query_parameters) - reqs.append(req) - - return reqs, kwargs - @distributed_trace def set_standard_blob_tier_blobs( - self, - standard_blob_tier, # type: Optional[Union[str, StandardBlobTier]] - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - # type: (...) -> Iterator[HttpResponse] + self, standard_blob_tier: Optional[Union[str, "StandardBlobTier"]], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> Iterator["HttpResponse"]: """This operation sets the tier on block blobs. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. + The maximum number of blobs that can be updated in a single request is 256. + :param standard_blob_tier: Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool', 'Archive'. The hot tier is optimized for storing data that is accessed @@ -1334,7 +1467,7 @@ def set_standard_blob_tier_blobs( lease: key: 'lease_id', value type: Union[str, LeaseClient] snapshot: - key: "snapshost", value type: str + key: "snapshot", value type: str version id: key: "version_id", value type: str tags match condition: @@ -1342,7 +1475,7 @@ def set_standard_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob :keyword str if_tags_match_condition: @@ -1352,27 +1485,39 @@ def set_standard_blob_tier_blobs( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. 
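The tier batch accepts the same per-blob dict form; a minimal sketch, passing None for the shared tier so the per-blob 'blob_tier' keys take effect (see `tier = blob_tier or blob.get('blob_tier')` in the helper module below):

    responses = container.set_standard_blob_tier_blobs(
        None,  # no shared tier; each dict below carries its own
        {"name": "blob-1", "blob_tier": "Cool"},
        {"name": "blob-2", "blob_tier": "Archive"},
        raise_on_any_failure=False,
    )
    for sub_response in responses:
        print(sub_response.status_code)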
:return: An iterator of responses, one for each blob in order :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] """ - reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + standard_blob_tier, + self._client, + *blobs, + **kwargs) return self._batch_send(*reqs, **options) @distributed_trace def set_premium_page_blob_tier_blobs( - self, - premium_page_blob_tier, # type: Optional[Union[str, PremiumPageBlobTier]] - *blobs, # type: List[Union[str, BlobProperties, dict]] - **kwargs - ): - # type: (...) -> Iterator[HttpResponse] + self, premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any + ) -> Iterator["HttpResponse"]: """Sets the page blob tiers on all blobs. This API is only supported for page blobs on premium accounts. + The maximum number of blobs that can be updated in a single request is 256. + :param premium_page_blob_tier: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1399,36 +1544,48 @@ def set_premium_page_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. :return: An iterator of responses, one for each blob in order - :rtype: iterator[~azure.core.pipeline.transport.HttpResponse] + :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] """ - reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + premium_page_blob_tier, + self._client, + *blobs, + **kwargs) return self._batch_send(*reqs, **options) def get_blob_client( - self, blob, # type: Union[str, BlobProperties] - snapshot=None # type: str - ): - # type: (...) -> BlobClient + self, blob: str, + snapshot: Optional[str] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. - :param blob: + :param str blob: The blob with which to interact. - :type blob: str or ~azure.storage.blob.BlobProperties :param str snapshot: The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`~BlobClient.create_snapshot()`. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. 
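For the snapshot and version_id parameters just described, a two-line sketch (timestamp values are placeholders):

    snap_client = container.get_blob_client("event.bin", snapshot="2024-12-20T00:00:00.0000000Z")
    old_version = container.get_blob_client("event.bin", version_id="2024-12-20T00:00:01.0000000Z")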
:returns: A BlobClient. :rtype: ~azure.storage.blob.BlobClient @@ -1441,7 +1598,15 @@ def get_blob_client( :dedent: 8 :caption: Get the blob client. """ - blob_name = _get_blob_name(blob) + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) + blob_name = blob.get('name') + else: + blob_name = blob _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access policies=self._pipeline._impl_policies # pylint: disable = protected-access @@ -1450,5 +1615,6 @@ def get_blob_client( self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client_helpers.py new file mode 100644 index 000000000000..82edd48dffb8 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_container_client_helpers.py @@ -0,0 +1,266 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union +from urllib.parse import quote, urlparse + +from azure.core import MatchConditions +from azure.core.pipeline.transport import HttpRequest +from ._blob_client_helpers import _generic_delete_blob_options +from ._generated import AzureBlobStorage +from ._models import BlobProperties +from ._shared.base_client import parse_query + +if TYPE_CHECKING: + from azure.storage.blob import RehydratePriority + from urllib.parse import ParseResult + from ._generated.models import LeaseAccessConditions, ModifiedAccessConditions + from ._models import PremiumPageBlobTier, StandardBlobTier + + +def _parse_url(account_url: str, container_name: str) -> Tuple["ParseResult", Any]: + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError as exc: + raise ValueError("Container URL must be a string.") from exc + parsed_url = urlparse(account_url.rstrip('/')) + if not container_name: + raise ValueError("Please specify a container name.") + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {account_url}") + + _, sas_token = parse_query(parsed_url.query) + + return parsed_url, sas_token + +def _format_url(container_name: Union[bytes, str], hostname: str, scheme: str, query_str: str) -> str: + if isinstance(container_name, str): + container_name = container_name.encode('UTF-8') + return f"{scheme}://{hostname}/{quote(container_name)}{query_str}" + +# This code is a copy from _generated. +# Once Autorest is able to provide request preparation this code should be removed. +def _generate_delete_blobs_subrequest_options( + client: AzureBlobStorage, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + delete_snapshots: Optional[str] = None, + lease_access_conditions: Optional["LeaseAccessConditions"] = None, + modified_access_conditions: Optional["ModifiedAccessConditions"] = None, + **kwargs +) -> Tuple[Dict[str, Any], Dict[str, Any]]: + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + if_modified_since = None + if modified_access_conditions is not None: + if_modified_since = modified_access_conditions.if_modified_since + if_unmodified_since = None + if modified_access_conditions is not None: + if_unmodified_since = modified_access_conditions.if_unmodified_since + if_match = None + if modified_access_conditions is not None: + if_match = modified_access_conditions.if_match + if_none_match = None + if modified_access_conditions is not None: + if_none_match = modified_access_conditions.if_none_match + if_tags = None + if modified_access_conditions is not None: + if_tags = modified_access_conditions.if_tags + + # Construct parameters + timeout = kwargs.pop('timeout', None) + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if version_id is not None: + query_parameters['versionid'] = client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + + # Construct headers + header_parameters = {} + if delete_snapshots is not None: + header_parameters['x-ms-delete-snapshots'] = client._serialize.header( # pylint: 
disable=protected-access + "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = client._serialize.header( # pylint: disable=protected-access + "lease_id", lease_id, 'str') + if if_modified_since is not None: + header_parameters['If-Modified-Since'] = client._serialize.header( # pylint: disable=protected-access + "if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = client._serialize.header( # pylint: disable=protected-access + "if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + header_parameters['If-Match'] = client._serialize.header( # pylint: disable=protected-access + "if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = client._serialize.header( # pylint: disable=protected-access + "if_none_match", if_none_match, 'str') + if if_tags is not None: + header_parameters['x-ms-if-tags'] = client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + +def _generate_delete_blobs_options( + query_str: str, + container_name: str, + client: AzureBlobStorage, + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any +) -> Tuple[List[HttpRequest], Dict[str, Any]]: + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + delete_snapshots = kwargs.pop('delete_snapshots', None) + if_modified_since = kwargs.pop('if_modified_since', None) + if_unmodified_since = kwargs.pop('if_unmodified_since', None) + if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) + url_prepend = kwargs.pop('url_prepend', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + if not isinstance(blob, str): + blob_name = blob.get('name') + options = _generic_delete_blob_options( + snapshot=blob.get('snapshot'), + version_id=blob.get('version_id'), + delete_snapshots=delete_snapshots or blob.get('delete_snapshots'), + lease=blob.get('lease_id'), + if_modified_since=if_modified_since or blob.get('if_modified_since'), + if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'), + etag=blob.get('etag'), + if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'), + match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag') + else None, + timeout=blob.get('timeout'), + ) + else: + blob_name = blob + options = _generic_delete_blob_options( + delete_snapshots=delete_snapshots, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + if_tags_match_condition=if_tags_match_condition + ) + + query_parameters, header_parameters = _generate_delete_blobs_subrequest_options(client, **options) + + req = HttpRequest( + "DELETE", + (f"{'/' + quote(url_prepend) if url_prepend else ''}/" + f"{quote(container_name)}/{quote(str(blob_name), safe='/~')}{query_str}"), + headers=header_parameters + ) + + req.format_parameters(query_parameters) + reqs.append(req) + + return reqs, kwargs + +# This code is a copy from _generated. +# Once Autorest is able to provide request preparation this code should be removed. 
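+# The builder below mirrors the delete helper: it emits the query/header dicts for
+# a "comp=tier" sub-request, with the x-ms-access-tier header carrying the requested
+# tier and snapshot/versionid selecting which revision of the blob is retiered.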
+def _generate_set_tiers_subrequest_options( + client: AzureBlobStorage, + tier: Optional[Union["PremiumPageBlobTier", "StandardBlobTier", str]], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + rehydrate_priority: Optional["RehydratePriority"] = None, + lease_access_conditions: Optional["LeaseAccessConditions"] = None, + **kwargs: Any +) -> Tuple[Dict[str, Any], Dict[str, Any]]: + if not tier: + raise ValueError("A blob tier must be specified") + if snapshot and version_id: + raise ValueError("Snapshot and version_id cannot be set at the same time") + if_tags = kwargs.pop('if_tags', None) + + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + + comp = "tier" + timeout = kwargs.pop('timeout', None) + # Construct parameters + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if version_id is not None: + query_parameters['versionid'] = client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + query_parameters['comp'] = client._serialize.query("comp", comp, 'str') # pylint: disable=protected-access + + # Construct headers + header_parameters = {} + header_parameters['x-ms-access-tier'] = client._serialize.header("tier", tier, 'str') # pylint: disable=protected-access + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = client._serialize.header( # pylint: disable=protected-access + "rehydrate_priority", rehydrate_priority, 'str') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = client._serialize.header("lease_id", lease_id, 'str') # pylint: disable=protected-access + if if_tags is not None: + header_parameters['x-ms-if-tags'] = client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + +def _generate_set_tiers_options( + query_str: str, + container_name: str, + blob_tier: Optional[Union["PremiumPageBlobTier", "StandardBlobTier", str]], + client: AzureBlobStorage, + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any +) -> Tuple[List[HttpRequest], Dict[str, Any]]: + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + rehydrate_priority = kwargs.pop('rehydrate_priority', None) + if_tags = kwargs.pop('if_tags_match_condition', None) + url_prepend = kwargs.pop('url_prepend', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + if not isinstance(blob, str): + blob_name = blob.get('name') + tier = blob_tier or blob.get('blob_tier') + query_parameters, header_parameters = _generate_set_tiers_subrequest_options( + client=client, + tier=tier, + snapshot=blob.get('snapshot'), + version_id=blob.get('version_id'), + rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'), + lease_access_conditions=blob.get('lease_id'), + if_tags=if_tags or blob.get('if_tags_match_condition'), + timeout=timeout or blob.get('timeout') + ) + else: + blob_name = blob + query_parameters, header_parameters = 
_generate_set_tiers_subrequest_options( + client, blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags) + + req = HttpRequest( + "PUT", + (f"{'/' + quote(url_prepend) if url_prepend else ''}/" + f"{quote(container_name)}/{quote(str(blob_name), safe='/~')}{query_str}"), + headers=header_parameters + ) + req.format_parameters(query_parameters) + reqs.append(req) + + return reqs, kwargs diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_deserialize.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_deserialize.py index ae65c840f238..b6ee916097a1 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_deserialize.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_deserialize.py @@ -3,24 +3,42 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use -from typing import ( # pylint: disable=unused-import - Tuple, Dict, List, - TYPE_CHECKING -) -from ._models import BlobType, CopyProperties, ContentSettings, LeaseProperties, BlobProperties +from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING +from urllib.parse import unquote +from xml.etree.ElementTree import Element + +from ._models import ( + BlobAnalyticsLogging, + BlobProperties, + BlobType, + ContainerProperties, + ContentSettings, + CopyProperties, + CorsRule, + ImmutabilityPolicy, + LeaseProperties, + Metrics, + ObjectReplicationPolicy, + ObjectReplicationRule, + RetentionPolicy, + StaticWebsite +) from ._shared.models import get_enum_value - from ._shared.response_handlers import deserialize_metadata -from ._models import ContainerProperties, BlobAnalyticsLogging, Metrics, CorsRule, RetentionPolicy, \ - StaticWebsite, ObjectReplicationPolicy, ObjectReplicationRule if TYPE_CHECKING: - from ._generated.models import PageList - + from azure.core.pipeline import PipelineResponse + from ._generated.models import ( + BlobItemInternal, + BlobTags, + PageList, + StorageServiceProperties, + StorageServiceStats, + ) + from ._shared.models import LocationMode -def deserialize_pipeline_response_into_cls(cls_method, response, obj, headers): +def deserialize_pipeline_response_into_cls(cls_method, response: "PipelineResponse", obj: Any, headers: Dict[str, Any]): try: deserialized_response = response.http_response except AttributeError: @@ -28,7 +46,7 @@ def deserialize_pipeline_response_into_cls(cls_method, response, obj, headers): return cls_method(deserialized_response, obj, headers) -def deserialize_blob_properties(response, obj, headers): +def deserialize_blob_properties(response: "PipelineResponse", obj: Any, headers: Dict[str, Any]) -> BlobProperties: blob_properties = BlobProperties( metadata=deserialize_metadata(response, obj, headers), object_replication_source_properties=deserialize_ors_policies(response.http_response.headers), @@ -42,7 +60,7 @@ def deserialize_blob_properties(response, obj, headers): return blob_properties -def deserialize_ors_policies(policy_dictionary): +def deserialize_ors_policies(policy_dictionary: Optional[Dict[str, str]]) -> Optional[List[ObjectReplicationPolicy]]: if policy_dictionary is None: return None @@ -52,7 +70,7 @@ def 
deserialize_ors_policies(policy_dictionary): or_policy_status_headers = {key: val for key, val in policy_dictionary.items() if 'or-' in key and key != 'x-ms-or-policy-id'} - parsed_result = {} + parsed_result: Dict[str, List[ObjectReplicationRule]] = {} for key, val in or_policy_status_headers.items(): # list blobs gives or-policy_rule and get blob properties gives x-ms-or-policy_rule @@ -69,13 +87,21 @@ def deserialize_ors_policies(policy_dictionary): return result_list -def deserialize_blob_stream(response, obj, headers): +def deserialize_blob_stream( + response: "PipelineResponse", + obj: Any, + headers: Dict[str, Any] +) -> Tuple["LocationMode", Any]: blob_properties = deserialize_blob_properties(response, obj, headers) obj.properties = blob_properties return response.http_response.location_mode, obj -def deserialize_container_properties(response, obj, headers): +def deserialize_container_properties( + response: "PipelineResponse", + obj: Any, + headers: Dict[str, Any] +) -> ContainerProperties: metadata = deserialize_metadata(response, obj, headers) container_properties = ContainerProperties( metadata=metadata, @@ -84,65 +110,70 @@ def deserialize_container_properties(response, obj, headers): return container_properties -def get_page_ranges_result(ranges): - # type: (PageList) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] - page_range = [] # type: ignore - clear_range = [] # type: List +def get_page_ranges_result(ranges: "PageList") -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]: + page_range = [] + clear_range = [] if ranges.page_range: - page_range = [{'start': b.start, 'end': b.end} for b in ranges.page_range] # type: ignore + page_range = [{'start': b.start, 'end': b.end} for b in ranges.page_range] if ranges.clear_range: clear_range = [{'start': b.start, 'end': b.end} for b in ranges.clear_range] - return page_range, clear_range # type: ignore + return page_range, clear_range -def service_stats_deserialize(generated): - """Deserialize a ServiceStats objects into a dict. - """ +def service_stats_deserialize(generated: "StorageServiceStats") -> Dict[str, Any]: + status = None + last_sync_time = None + if generated.geo_replication is not None: + status = generated.geo_replication.status + last_sync_time = generated.geo_replication.last_sync_time return { 'geo_replication': { - 'status': generated.geo_replication.status, - 'last_sync_time': generated.geo_replication.last_sync_time, + 'status': status, + 'last_sync_time': last_sync_time } } - -def service_properties_deserialize(generated): - """Deserialize a ServiceProperties objects into a dict. 
- """ +def service_properties_deserialize(generated: "StorageServiceProperties") -> Dict[str, Any]: + cors_list = None + if generated.cors is not None: + cors_list = [CorsRule._from_generated(cors) for cors in generated.cors] # pylint: disable=protected-access return { 'analytics_logging': BlobAnalyticsLogging._from_generated(generated.logging), # pylint: disable=protected-access 'hour_metrics': Metrics._from_generated(generated.hour_metrics), # pylint: disable=protected-access 'minute_metrics': Metrics._from_generated(generated.minute_metrics), # pylint: disable=protected-access - 'cors': [CorsRule._from_generated(cors) for cors in generated.cors], # pylint: disable=protected-access - 'target_version': generated.default_service_version, # pylint: disable=protected-access + 'cors': cors_list, + 'target_version': generated.default_service_version, 'delete_retention_policy': RetentionPolicy._from_generated(generated.delete_retention_policy), # pylint: disable=protected-access 'static_website': StaticWebsite._from_generated(generated.static_website), # pylint: disable=protected-access } -def get_blob_properties_from_generated_code(generated): +def get_blob_properties_from_generated_code(generated: "BlobItemInternal") -> BlobProperties: blob = BlobProperties() - blob.name = generated.name + if generated.name.encoded and generated.name.content is not None: + blob.name = unquote(generated.name.content) + else: + blob.name = generated.name.content #type: ignore blob_type = get_enum_value(generated.properties.blob_type) - blob.blob_type = BlobType(blob_type) if blob_type else None + blob.blob_type = BlobType(blob_type) blob.etag = generated.properties.etag blob.deleted = generated.deleted blob.snapshot = generated.snapshot blob.is_append_blob_sealed = generated.properties.is_sealed - blob.metadata = generated.metadata.additional_properties if generated.metadata else {} + blob.metadata = generated.metadata.additional_properties if generated.metadata else {} # type: ignore [assignment] blob.encrypted_metadata = generated.metadata.encrypted if generated.metadata else None blob.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access blob.copy = CopyProperties._from_generated(generated) # pylint: disable=protected-access blob.last_modified = generated.properties.last_modified - blob.creation_time = generated.properties.creation_time + blob.creation_time = generated.properties.creation_time # type: ignore [assignment] blob.content_settings = ContentSettings._from_generated(generated) # pylint: disable=protected-access - blob.size = generated.properties.content_length + blob.size = generated.properties.content_length # type: ignore [assignment] blob.page_blob_sequence_number = generated.properties.blob_sequence_number - blob.server_encrypted = generated.properties.server_encrypted + blob.server_encrypted = generated.properties.server_encrypted # type: ignore [assignment] blob.encryption_scope = generated.properties.encryption_scope blob.deleted_time = generated.properties.deleted_time blob.remaining_retention_days = generated.properties.remaining_retention_days - blob.blob_tier = generated.properties.access_tier + blob.blob_tier = generated.properties.access_tier # type: ignore [assignment] blob.rehydrate_priority = generated.properties.rehydrate_priority blob.blob_tier_inferred = generated.properties.access_tier_inferred blob.archive_status = generated.properties.archive_status @@ -150,17 +181,54 @@ def get_blob_properties_from_generated_code(generated): blob.version_id = 
generated.version_id blob.is_current_version = generated.is_current_version blob.tag_count = generated.properties.tag_count - blob.tags = parse_tags(generated.blob_tags) # pylint: disable=protected-access + blob.tags = parse_tags(generated.blob_tags) blob.object_replication_source_properties = deserialize_ors_policies(generated.object_replication_metadata) blob.last_accessed_on = generated.properties.last_accessed_on + blob.immutability_policy = ImmutabilityPolicy._from_generated(generated) # pylint: disable=protected-access + blob.has_legal_hold = generated.properties.legal_hold + blob.has_versions_only = generated.has_versions_only return blob - -def parse_tags(generated_tags): - # type: (Optional[List[BlobTag]]) -> Union[Dict[str, str], None] +def parse_tags(generated_tags: Optional["BlobTags"]) -> Optional[Dict[str, str]]: """Deserialize a list of BlobTag objects into a dict. + + :param Optional[BlobTags] generated_tags: + A list containing the BlobTag objects from generated code. + :returns: A dictionary of the BlobTag objects. + :rtype: Optional[Dict[str, str]] """ if generated_tags: tag_dict = {t.key: t.value for t in generated_tags.blob_tag_set} return tag_dict return None + + +def load_single_xml_node(element: Element, name: str) -> Optional[Element]: + return element.find(name) + + +def load_many_xml_nodes( + element: Element, + name: str, + wrapper: Optional[str] = None +) -> List[Optional[Element]]: + found_element: Optional[Element] = element + if wrapper: + found_element = load_single_xml_node(element, wrapper) + if found_element is None: + return [] + return list(found_element.findall(name)) + + +def load_xml_string(element: Element, name: str) -> Optional[str]: + node = element.find(name) + if node is None or not node.text: + return None + return node.text + + +def load_xml_int(element: Element, name: str) -> Optional[int]: + node = element.find(name) + if node is None or not node.text: + return None + return int(node.text) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_download.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_download.py index 70a4eb08dbc5..8b8f428f845b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_download.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_download.py @@ -3,54 +3,72 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - +import codecs import sys import threading +import time import warnings -from io import BytesIO - -from azure.core.exceptions import HttpResponseError +from io import BytesIO, StringIO +from typing import ( + Any, Callable, cast, Dict, Generator, + Generic, IO, Iterator, List, Optional, + overload, Tuple, TypeVar, Union, TYPE_CHECKING +) + +from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError from azure.core.tracing.common import with_current_context -from ._shared.encryption import decrypt_blob -from ._shared.request_handlers import validate_and_format_range_headers -from ._shared.response_handlers import process_storage_error, parse_length_from_content_range -from ._deserialize import get_page_ranges_result - -def process_range_and_offset(start_range, end_range, length, encryption): +from ._shared.request_handlers import validate_and_format_range_headers +from ._shared.response_handlers import parse_length_from_content_range, process_storage_error +from ._deserialize import deserialize_blob_properties, get_page_ranges_result +from ._encryption import ( + adjust_blob_size_for_encryption, + decrypt_blob, + get_adjusted_download_range_and_offset, + is_encryption_v2, + parse_encryption_data +) + +if TYPE_CHECKING: + from codecs import IncrementalDecoder + from ._encryption import _EncryptionData + from ._generated import AzureBlobStorage + from ._generated.operations import BlobOperations + from ._models import BlobProperties + from ._shared.models import StorageConfiguration + + +T = TypeVar('T', bytes, str) + + +def process_range_and_offset( + start_range: int, + end_range: int, + length: Optional[int], + encryption_options: Dict[str, Any], + encryption_data: Optional["_EncryptionData"] +) -> Tuple[Tuple[int, int], Tuple[int, int]]: start_offset, end_offset = 0, 0 - if encryption.get("key") is not None or encryption.get("resolver") is not None: - if start_range is not None: - # Align the start of the range along a 16 byte block - start_offset = start_range % 16 - start_range -= start_offset - - # Include an extra 16 bytes for the IV if necessary - # Because of the previous offsetting, start_range will always - # be a multiple of 16. 
- if start_range > 0: - start_offset += 16 - start_range -= 16 - - if length is not None: - # Align the end of the range along a 16 byte block - end_offset = 15 - (end_range % 16) - end_range += end_offset + if encryption_options.get("key") is not None or encryption_options.get("resolver") is not None: + return get_adjusted_download_range_and_offset( + start_range, + end_range, + length, + encryption_data) return (start_range, end_range), (start_offset, end_offset) -def process_content(data, start_offset, end_offset, encryption): +def process_content(data: Any, start_offset: int, end_offset: int, encryption: Dict[str, Any]) -> bytes: if data is None: raise ValueError("Response cannot be None.") - try: - content = b"".join(list(data)) - except Exception as error: - raise HttpResponseError(message="Download stream interrupted.", response=data.response, error=error) + + content = b"".join(list(data)) + if content and encryption.get("key") is not None or encryption.get("resolver") is not None: try: return decrypt_blob( - encryption.get("required"), + encryption.get("required") or False, encryption.get("key"), encryption.get("resolver"), content, @@ -59,26 +77,28 @@ def process_content(data, start_offset, end_offset, encryption): data.response.headers, ) except Exception as error: - raise HttpResponseError(message="Decryption failed.", response=data.response, error=error) + raise HttpResponseError(message="Decryption failed.", response=data.response, error=error) from error return content class _ChunkDownloader(object): # pylint: disable=too-many-instance-attributes def __init__( self, - client=None, - non_empty_ranges=None, - total_size=None, - chunk_size=None, - current_progress=None, - start_range=None, - end_range=None, - stream=None, - parallel=None, - validate_content=None, - encryption_options=None, - **kwargs - ): + client: "BlobOperations", + total_size: int, + chunk_size: int, + current_progress: int, + start_range: int, + end_range: int, + validate_content: bool, + encryption_options: Dict[str, Any], + encryption_data: Optional["_EncryptionData"] = None, + stream: Any = None, + parallel: Optional[int] = None, + non_empty_ranges: Optional[List[Dict[str, Any]]] = None, + progress_hook: Optional[Callable[[int, Optional[int]], None]] = None, + **kwargs: Any + ) -> None: self.client = client self.non_empty_ranges = non_empty_ranges @@ -92,54 +112,59 @@ def __init__( self.stream = stream self.stream_lock = threading.Lock() if parallel else None self.progress_lock = threading.Lock() if parallel else None + self.progress_hook = progress_hook # For a parallel download, the stream is always seekable, so we note down the current position # in order to seek to the right place when out-of-order chunks come in - self.stream_start = stream.tell() if parallel else None + self.stream_start = stream.tell() if parallel else 0 # Download progress so far self.progress_total = current_progress # Encryption self.encryption_options = encryption_options + self.encryption_data = encryption_data # Parameters for each get operation self.validate_content = validate_content self.request_options = kwargs - def _calculate_range(self, chunk_start): + def _calculate_range(self, chunk_start: int) -> Tuple[int, int]: if chunk_start + self.chunk_size > self.end_index: chunk_end = self.end_index else: chunk_end = chunk_start + self.chunk_size return chunk_start, chunk_end - def get_chunk_offsets(self): + def get_chunk_offsets(self) -> Generator[int, None, None]: index = self.start_index while index < self.end_index: 
yield index index += self.chunk_size - def process_chunk(self, chunk_start): + def process_chunk(self, chunk_start: int) -> None: chunk_start, chunk_end = self._calculate_range(chunk_start) - chunk_data = self._download_chunk(chunk_start, chunk_end - 1) + chunk_data, _ = self._download_chunk(chunk_start, chunk_end - 1) length = chunk_end - chunk_start if length > 0: self._write_to_stream(chunk_data, chunk_start) self._update_progress(length) - def yield_chunk(self, chunk_start): + def yield_chunk(self, chunk_start: int) -> Tuple[bytes, int]: chunk_start, chunk_end = self._calculate_range(chunk_start) return self._download_chunk(chunk_start, chunk_end - 1) - def _update_progress(self, length): + def _update_progress(self, length: int) -> None: if self.progress_lock: with self.progress_lock: # pylint: disable=not-context-manager self.progress_total += length else: self.progress_total += length - def _write_to_stream(self, chunk_data, chunk_start): + if self.progress_hook: + self.progress_hook(self.progress_total, self.total_size) + + def _write_to_stream(self, chunk_data: bytes, chunk_start: int) -> None: if self.stream_lock: with self.stream_lock: # pylint: disable=not-context-manager self.stream.seek(self.stream_start + (chunk_start - self.start_index)) @@ -147,7 +172,7 @@ def _write_to_stream(self, chunk_data, chunk_start): else: self.stream.write(chunk_data) - def _do_optimize(self, given_range_start, given_range_end): + def _do_optimize(self, given_range_start: int, given_range_end: int) -> bool: # If we have no page range list stored, then assume there's data everywhere for that page blob # or it's a block blob or append blob if self.non_empty_ranges is None: @@ -172,15 +197,18 @@ def _do_optimize(self, given_range_start, given_range_end): # Went through all src_ranges, but nothing overlapped. Optimization will be applied. return True - def _download_chunk(self, chunk_start, chunk_end): + def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes, int]: + if self.encryption_options is None: + raise ValueError("Required argument is missing: encryption_options") download_range, offset = process_range_and_offset( - chunk_start, chunk_end, chunk_end, self.encryption_options + chunk_start, chunk_end, chunk_end, self.encryption_options, self.encryption_data ) # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. # Do optimize and create empty chunk locally if condition is met. 
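+        # (The optimization applies when the requested range does not overlap any known non-empty page range, so the chunk can be produced locally as zeros instead of downloaded.)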
if self._do_optimize(download_range[0], download_range[1]): - chunk_data = b"\x00" * self.chunk_size + content_length = download_range[1] - download_range[0] + 1 + chunk_data = b"\x00" * content_length else: range_header, range_validation = validate_and_format_range_headers( download_range[0], @@ -188,98 +216,130 @@ def _download_chunk(self, chunk_start, chunk_end): check_content_md5=self.validate_content ) - try: - _, response = self.client.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self.validate_content, - data_stream_total=self.total_size, - download_stream_current=self.progress_total, - **self.request_options - ) - except HttpResponseError as error: - process_storage_error(error) + retry_active = True + retry_total = 3 + while retry_active: + response: Any = None + try: + _, response = self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + ) + except HttpResponseError as error: + process_storage_error(error) - chunk_data = process_content(response, offset[0], offset[1], self.encryption_options) + try: + chunk_data = process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + time.sleep(1) + content_length = response.content_length # This makes sure that if_match is set so that we can validate # that subsequent downloads are to an unmodified blob if self.request_options.get("modified_access_conditions"): self.request_options["modified_access_conditions"].if_match = response.properties.etag - return chunk_data + return chunk_data, content_length class _ChunkIterator(object): - """Async iterator for chunks in blob download stream.""" + """Iterator for chunks in blob download stream.""" - def __init__(self, size, content, downloader): + def __init__(self, size: int, content: bytes, downloader: Optional[_ChunkDownloader], chunk_size: int) -> None: self.size = size + self._chunk_size = chunk_size self._current_content = content self._iter_downloader = downloader - self._iter_chunks = None - self._complete = (size == 0) + self._iter_chunks: Optional[Generator[int, None, None]] = None + self._complete = size == 0 - def __len__(self): + def __len__(self) -> int: return self.size - def __iter__(self): + def __iter__(self) -> Iterator[bytes]: return self - def __next__(self): - """Iterate through responses.""" + # Iterate through responses. 
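+    # Each call returns at most _chunk_size bytes: buffered content from the initial GET is served first, then further chunks are fetched through the downloader.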
+    def __next__(self) -> bytes: if self._complete: raise StopIteration("Download complete") if not self._iter_downloader: - # If no iterator was supplied, the download completed with - # the initial GET, so we just return that data + # cut the data obtained from initial GET into chunks + if len(self._current_content) > self._chunk_size: + return self._get_chunk_data() self._complete = True return self._current_content if not self._iter_chunks: self._iter_chunks = self._iter_downloader.get_chunk_offsets() - else: - chunk = next(self._iter_chunks) - self._current_content = self._iter_downloader.yield_chunk(chunk) - return self._current_content + # initial GET result still has more than _chunk_size bytes of data + if len(self._current_content) >= self._chunk_size: + return self._get_chunk_data() + + try: + next_chunk = next(self._iter_chunks) + self._current_content += self._iter_downloader.yield_chunk(next_chunk)[0] + except StopIteration as e: + self._complete = True + if self._current_content: + return self._current_content + raise e + + # the current content from the first get is still there but smaller than chunk size + # therefore we want to make sure it's also included + return self._get_chunk_data() next = __next__ # Python 2 compatibility. + def _get_chunk_data(self) -> bytes: + chunk_data = self._current_content[: self._chunk_size] + self._current_content = self._current_content[self._chunk_size:] + return chunk_data -class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes - """A streaming object to download from Azure Storage. - :ivar str name: - The name of the blob being downloaded. - :ivar str container: - The name of the container where the blob is. - :ivar ~azure.storage.blob.BlobProperties properties: - The properties of the blob being downloaded. If only a range of the data is being - downloaded, this will be reflected in the properties. - :ivar int size: - The size of the total data in the stream. This will be the byte range if specified, - otherwise the total size of the blob. +class StorageStreamDownloader(Generic[T]): # pylint: disable=too-many-instance-attributes + """ + A streaming object to download from Azure Storage. """ + name: str + """The name of the blob being downloaded.""" + container: str + """The name of the container where the blob is.""" + properties: "BlobProperties" + """The properties of the blob being downloaded. If only a range of the data is being + downloaded, this will be reflected in the properties.""" + size: int + """The size of the total data in the stream.
This will be the byte range if specified, + otherwise the total size of the blob.""" + def __init__( self, - clients=None, - config=None, - start_range=None, - end_range=None, - validate_content=None, - encryption_options=None, - max_concurrency=1, - name=None, - container=None, - encoding=None, - **kwargs - ): + clients: "AzureBlobStorage" = None, # type: ignore [assignment] + config: "StorageConfiguration" = None, # type: ignore [assignment] + start_range: Optional[int] = None, + end_range: Optional[int] = None, + validate_content: bool = None, # type: ignore [assignment] + encryption_options: Dict[str, Any] = None, # type: ignore [assignment] + max_concurrency: int = 1, + name: str = None, # type: ignore [assignment] + container: str = None, # type: ignore [assignment] + encoding: Optional[str] = None, + download_cls: Optional[Callable] = None, + **kwargs: Any + ) -> None: self.name = name self.container = container - self.properties = None - self.size = None + self.size = 0 self._clients = clients self._config = config @@ -289,64 +349,97 @@ def __init__( self._encoding = encoding self._validate_content = validate_content self._encryption_options = encryption_options or {} + self._progress_hook = kwargs.pop('progress_hook', None) self._request_options = kwargs + self._response = None self._location_mode = None - self._download_complete = False - self._current_content = None - self._file_size = None + self._current_content: Union[str, bytes] = b'' + self._file_size = 0 self._non_empty_ranges = None - self._response = None + self._encryption_data: Optional["_EncryptionData"] = None + + # The content download offset, after any processing (decryption), in bytes + self._download_offset = 0 + # The raw download offset, before processing (decryption), in bytes + self._raw_download_offset = 0 + # The offset the stream has been read to in bytes or chars depending on mode + self._read_offset = 0 + # The offset into current_content that has been consumed in bytes or chars depending on mode + self._current_content_offset = 0 + + self._text_mode: Optional[bool] = None + self._decoder: Optional["IncrementalDecoder"] = None + # Whether the current content is the first chunk of download content or not + self._first_chunk = True + self._download_start = self._start_range or 0 + + # The cls is passed in via download_cls to avoid conflicting arg name with Generic.__new__ + # but needs to be changed to cls in the request options. + self._request_options['cls'] = download_cls + + if self._encryption_options.get("key") is not None or self._encryption_options.get("resolver") is not None: + self._get_encryption_data_request() # The service only provides transactional MD5s for chunks under 4MB. # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first # chunk so a transactional MD5 can be retrieved. 
- self._first_get_size = ( + first_get_size = ( self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size ) - initial_request_start = self._start_range if self._start_range is not None else 0 - if self._end_range is not None and self._end_range - self._start_range < self._first_get_size: + initial_request_start = self._download_start + if self._end_range is not None and self._end_range - initial_request_start < first_get_size: initial_request_end = self._end_range else: - initial_request_end = initial_request_start + self._first_get_size - 1 + initial_request_end = initial_request_start + first_get_size - 1 self._initial_range, self._initial_offset = process_range_and_offset( - initial_request_start, initial_request_end, self._end_range, self._encryption_options + initial_request_start, + initial_request_end, + self._end_range, + self._encryption_options, + self._encryption_data ) self._response = self._initial_request() - self.properties = self._response.properties + self.properties = cast("BlobProperties", self._response.properties) self.properties.name = self.name self.properties.container = self.container - # Set the content length to the download size instead of the size of - # the last range + # Set the content length to the download size instead of the size of the last range self.properties.size = self.size - - # Overwrite the content range to the user requested range - self.properties.content_range = "bytes {0}-{1}/{2}".format( - self._start_range, - self._end_range, - self._file_size - ) + self.properties.content_range = (f"bytes {self._download_start}-" + f"{self._end_range if self._end_range is not None else self._file_size - 1}/" + f"{self._file_size}") # Overwrite the content MD5 as it is the MD5 for the last range instead # of the stored MD5 # TODO: Set to the stored MD5 when the service returns this - self.properties.content_md5 = None - - if self.size == 0: - self._current_content = b"" - else: - self._current_content = process_content( - self._response, - self._initial_offset[0], - self._initial_offset[1], - self._encryption_options - ) + self.properties.content_md5 = None # type: ignore [attr-defined] def __len__(self): return self.size + def _get_encryption_data_request(self) -> None: + # Save current request cls + download_cls = self._request_options.pop('cls', None) + # Adjust cls for get_properties + self._request_options['cls'] = deserialize_blob_properties + + properties = cast("BlobProperties", self._clients.blob.get_properties(**self._request_options)) + # This will return None if there is no encryption metadata or there are parsing errors. + # That is acceptable here, the proper error will be caught and surfaced when attempting + # to decrypt the blob. 
+ self._encryption_data = parse_encryption_data(properties.metadata) + + # Restore cls for download + self._request_options['cls'] = download_cls + + @property + def _download_complete(self): + if is_encryption_v2(self._encryption_data): + return self._download_offset >= self.size + return self._raw_download_offset >= self.size + def _initial_request(self): range_header, range_validation = validate_and_format_range_headers( self._initial_range[0], @@ -356,51 +449,78 @@ def _initial_request(self): check_content_md5=self._validate_content ) - try: - location_mode, response = self._clients.blob.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self._validate_content, - data_stream_total=None, - download_stream_current=0, - **self._request_options - ) + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = cast(Tuple[Optional[str], Any], self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options + )) - # Check the location we read from to ensure we use the same one - # for subsequent requests. - self._location_mode = location_mode - - # Parse the total file size and adjust the download size if ranges - # were specified - self._file_size = parse_length_from_content_range(response.properties.content_range) - if self._end_range is not None: - # Use the end range index unless it is over the end of the file - self.size = min(self._file_size, self._end_range - self._start_range + 1) - elif self._start_range is not None: - self.size = self._file_size - self._start_range - else: - self.size = self._file_size + # Check the location we read from to ensure we use the same one + # for subsequent requests. + self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._file_size is None: + raise ValueError("Required Content-Range response header is missing or malformed.") + # Remove any extra encryption data size from blob size + self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) + + if self._end_range is not None and self._start_range is not None: + # Use the end range index unless it is over the end of the file + self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size - except HttpResponseError as error: - if self._start_range is None and error.response.status_code == 416: - # Get range will fail on an empty file. If the user did not - # request a range, do a regular get request in order to get - # any properties. - try: - _, response = self._clients.blob.download( - validate_content=self._validate_content, - data_stream_total=0, - download_stream_current=0, - **self._request_options - ) - except HttpResponseError as error: + except HttpResponseError as error: + if self._start_range is None and error.response and error.response.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
+ try: + _, response = self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options + ) + except HttpResponseError as e: + process_storage_error(e) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: process_storage_error(error) - # Set the download size to empty - self.size = 0 - self._file_size = 0 - else: - process_storage_error(error) + try: + if self.size == 0: + self._current_content = b"" + else: + self._current_content = process_content( + response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + time.sleep(1) + self._download_offset += len(self._current_content) + self._raw_download_offset += response.content_length # get page ranges to optimize downloading sparse page blob if response.properties.blob_type == 'PageBlob': @@ -414,104 +534,249 @@ def _initial_request(self): except HttpResponseError: pass - # If the file is small, the download is complete at this point. - # If file size is large, download the rest of the file in chunks. - if response.properties.size != self.size: - # Lock on the etag. This can be overridden by the user by specifying '*' - if self._request_options.get("modified_access_conditions"): - if not self._request_options["modified_access_conditions"].if_match: - self._request_options["modified_access_conditions"].if_match = response.properties.etag - else: - self._download_complete = True + if not self._download_complete and self._request_options.get("modified_access_conditions"): + self._request_options["modified_access_conditions"].if_match = response.properties.etag + return response - def chunks(self): - if self.size == 0 or self._download_complete: - iter_downloader = None - else: - data_end = self._file_size - if self._end_range is not None: - # Use the end range index unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + def chunks(self) -> Iterator[bytes]: + """ + Iterate over chunks in the download stream. Note, the iterator returned will + iterate over the entire download content, regardless of any data that was + previously read. + + NOTE: If the stream has been partially read, some data may be re-downloaded by the iterator. + + :returns: An iterator of the chunks in the download stream. + :rtype: Iterator[bytes] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world.py + :start-after: [START download_a_blob_in_chunk] + :end-before: [END download_a_blob_in_chunk] + :language: python + :dedent: 12 + :caption: Download a blob using chunks(). + """ + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. chunks is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with chunks as only bytes are supported.") + + iter_downloader = None + # If we still have the first chunk buffered, use it. 
Otherwise, download all content again + if not self._first_chunk or not self._download_complete: + if self._first_chunk: + start = self._download_start + len(self._current_content) + current_progress = len(self._current_content) + else: + start = self._download_start + current_progress = 0 + + end = self._download_start + self.size + iter_downloader = _ChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # start where the first download ended - end_range=data_end, - stream=None, - parallel=False, + current_progress=current_progress, + start_range=start, + end_range=end, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, **self._request_options ) + + initial_content = self._current_content if self._first_chunk else b'' return _ChunkIterator( size=self.size, - content=self._current_content, - downloader=iter_downloader) + content=cast(bytes, initial_content), + downloader=iter_downloader, + chunk_size=self._config.max_chunk_get_size) - def readall(self): - """Download the contents of this blob. + @overload + def read(self, size: int = -1) -> T: + ... - This operation is blocking until all data is downloaded. + @overload + def read(self, *, chars: Optional[int] = None) -> T: + ... - :rtype: bytes or str + # pylint: disable-next=too-many-statements,too-many-branches + def read(self, size: int = -1, *, chars: Optional[int] = None) -> T: """ - stream = BytesIO() - self.readinto(stream) - data = stream.getvalue() - if self._encoding: - return data.decode(self._encoding) - return data + Read the specified bytes or chars from the stream. If `encoding` + was specified on `download_blob`, it is recommended to use the + chars parameter to read a specific number of chars to avoid decoding + errors. If size/chars is unspecified or negative all bytes will be read. + + :param int size: + The number of bytes to download from the stream. Leave unspecified + or set negative to download all bytes. + :keyword Optional[int] chars: + The number of chars to download from the stream. Leave unspecified + or set negative to download all chars. Note, this can only be used + when encoding is specified on `download_blob`. + :returns: + The requested data as bytes or a string if encoding was specified. If + the return value is empty, there is no more data to read. + :rtype: T + """ + if size > -1 and self._encoding: + warnings.warn( + "Size parameter specified with text encoding enabled. It is recommended to use chars " + "to read a specific number of characters instead." + ) + if size > -1 and chars is not None: + raise ValueError("Cannot specify both size and chars.") + if not self._encoding and chars is not None: + raise ValueError("Must specify encoding to read chars.") + if self._text_mode and size > -1: + raise ValueError("Stream has been partially read in text mode. Please use chars.") + if self._text_mode is False and chars is not None: + raise ValueError("Stream has been partially read in bytes mode. 
Please use size.") + + # Empty blob or already read to the end + if (size == 0 or chars == 0 or + (self._download_complete and self._current_content_offset >= len(self._current_content))): + return b'' if not self._encoding else '' # type: ignore [return-value] + + if not self._text_mode and chars is not None and self._encoding is not None: + self._text_mode = True + self._decoder = codecs.getincrementaldecoder(self._encoding)('strict') + self._current_content = self._decoder.decode( + cast(bytes, self._current_content), final=self._download_complete) + elif self._text_mode is None: + self._text_mode = False + + output_stream: Union[BytesIO, StringIO] + if self._text_mode: + output_stream = StringIO() + size = chars if chars else sys.maxsize + else: + output_stream = BytesIO() + size = size if size > 0 else sys.maxsize + readall = size == sys.maxsize + count = 0 + + # Start by reading from current_content + start = self._current_content_offset + length = min(len(self._current_content) - self._current_content_offset, size - count) + read = output_stream.write(self._current_content[start:start + length]) # type: ignore [arg-type] + + count += read + self._current_content_offset += read + self._read_offset += read + self._check_and_report_progress() + + remaining = size - count + if remaining > 0 and not self._download_complete: + # Create a downloader that can download the rest of the file + start = self._download_start + self._download_offset + end = self._download_start + self.size + + parallel = self._max_concurrency > 1 + downloader = _ChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._read_offset, + start_range=start, + end_range=end, + stream=output_stream, + parallel=parallel, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + encryption_data=self._encryption_data, + use_location=self._location_mode, + progress_hook=self._progress_hook, + **self._request_options + ) + self._first_chunk = False + + # When reading all data, have the downloader read everything into the stream. + # Else, read one chunk at a time (using the downloader as an iterator) until + # the requested size is reached. + chunks_iter = downloader.get_chunk_offsets() + if readall and not self._text_mode: + # Only do parallel if there is more than one chunk left to download + if parallel and (self.size - self._download_offset) > self._config.max_chunk_get_size: + import concurrent.futures + with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor: + list(executor.map( + with_current_context(downloader.process_chunk), + downloader.get_chunk_offsets() + )) + else: + for next_chunk in chunks_iter: + downloader.process_chunk(next_chunk) + + self._complete_read() - def content_as_bytes(self, max_concurrency=1): - """Download the contents of this file. 
+ else: + while (chunk := next(chunks_iter, None)) is not None and remaining > 0: + chunk_data, content_length = downloader.yield_chunk(chunk) + self._download_offset += len(chunk_data) + self._raw_download_offset += content_length + if self._text_mode and self._decoder is not None: + self._current_content = self._decoder.decode(chunk_data, final=self._download_complete) + else: + self._current_content = chunk_data + + if remaining < len(self._current_content): + read = output_stream.write(self._current_content[:remaining]) # type: ignore [arg-type] + else: + read = output_stream.write(self._current_content) # type: ignore [arg-type] + + self._current_content_offset = read + self._read_offset += read + remaining -= read + self._check_and_report_progress() + + data = output_stream.getvalue() + if not self._text_mode and self._encoding: + try: + # This is technically incorrect to do, but we have it for backwards compatibility. + data = cast(bytes, data).decode(self._encoding) + except UnicodeDecodeError: + warnings.warn( + "Encountered a decoding error while decoding blob data from a partial read. " + "Try using the `chars` keyword instead to read in text mode." + ) + raise - This operation is blocking until all data is downloaded. + return data # type: ignore [return-value] - :keyword int max_concurrency: - The number of parallel connections with which to download. - :rtype: bytes + def readall(self) -> T: """ - warnings.warn( - "content_as_bytes is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - return self.readall() - - def content_as_text(self, max_concurrency=1, encoding="UTF-8"): - """Download the contents of this blob, and decode as text. - + Read the entire contents of this blob. This operation is blocking until all data is downloaded. - :keyword int max_concurrency: - The number of parallel connections with which to download. - :param str encoding: - Test encoding to decode the downloaded bytes. Default is UTF-8. - :rtype: str + :returns: The requested data as bytes or a string if encoding was specified. + :rtype: T """ - warnings.warn( - "content_as_text is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - self._encoding = encoding - return self.readall() + return self.read() - def readinto(self, stream): + def readinto(self, stream: IO[bytes]) -> int: """Download the contents of this file to a stream. - :param stream: + :param IO[bytes] stream: The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. :returns: The number of bytes read. :rtype: int """ + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. 
readinto is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with readinto as only byte streams are supported.") + # The stream must be seekable if parallel download is required parallel = self._max_concurrency > 1 if parallel: @@ -521,53 +786,137 @@ def readinto(self, stream): try: stream.seek(stream.tell()) - except (NotImplementedError, AttributeError): - raise ValueError(error_message) - - # Write the content to the user stream - stream.write(self._current_content) + except (NotImplementedError, AttributeError) as exc: + raise ValueError(error_message) from exc + + # If some data has been streamed using `read`, only stream the remaining data + remaining_size = self.size - self._read_offset + # Already read to the end + if remaining_size <= 0: + return 0 + + # Write the current content to the user stream + current_remaining = len(self._current_content) - self._current_content_offset + start = self._current_content_offset + count = stream.write(cast(bytes, self._current_content[start:start + current_remaining])) + + self._current_content_offset += count + self._read_offset += count + if self._progress_hook: + self._progress_hook(self._read_offset, self.size) + + # If all the data was already downloaded/buffered if self._download_complete: - return self.size + return remaining_size - data_end = self._file_size - if self._end_range is not None: - # Use the length unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + data_start = self._download_start + self._read_offset + data_end = self._download_start + self.size downloader = _ChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # Start where the first download ended + current_progress=self._read_offset, + start_range=data_start, end_range=data_end, stream=stream, parallel=parallel, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, + progress_hook=self._progress_hook, **self._request_options ) if parallel: import concurrent.futures - executor = concurrent.futures.ThreadPoolExecutor(self._max_concurrency) - list(executor.map( - with_current_context(downloader.process_chunk), - downloader.get_chunk_offsets() - )) + with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor: + list(executor.map( + with_current_context(downloader.process_chunk), + downloader.get_chunk_offsets() + )) else: for chunk in downloader.get_chunk_offsets(): downloader.process_chunk(chunk) - return self.size + + self._complete_read() + return remaining_size + + def _complete_read(self): + """Adjusts all offsets to the end of the download.""" + self._download_offset = self.size + self._raw_download_offset = self.size + self._read_offset = self.size + self._current_content_offset = len(self._current_content) + + def _check_and_report_progress(self): + """Reports progress if necessary.""" + # Only report progress at the end of each chunk and use download_offset to always report + # progress in terms of (approximate) byte count. + if self._progress_hook and self._current_content_offset == len(self._current_content): + self._progress_hook(self._download_offset, self.size) + + def content_as_bytes(self, max_concurrency=1): + """DEPRECATED: Download the contents of this file. 
+ + This operation is blocking until all data is downloaded. + + This method is deprecated, use :func:`readall` instead. + + :param int max_concurrency: + The number of parallel connections with which to download. + :returns: The contents of the file as bytes. + :rtype: bytes + """ + warnings.warn( + "content_as_bytes is deprecated, use readall instead", + DeprecationWarning + ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "content_as_bytes is not supported in text mode.") + + self._max_concurrency = max_concurrency + return self.readall() + + def content_as_text(self, max_concurrency=1, encoding="UTF-8"): + """DEPRECATED: Download the contents of this blob, and decode as text. + + This operation is blocking until all data is downloaded. + + This method is deprecated, use :func:`readall` instead. + + :param int max_concurrency: + The number of parallel connections with which to download. + :param str encoding: + Text encoding to decode the downloaded bytes. Default is UTF-8. + :returns: The content of the file as a str. + :rtype: str + """ + warnings.warn( + "content_as_text is deprecated, use readall instead", + DeprecationWarning + ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "content_as_text is not supported in text mode.") + + self._max_concurrency = max_concurrency + self._encoding = encoding + return self.readall() def download_to_stream(self, stream, max_concurrency=1): - """Download the contents of this blob to a stream. + """DEPRECATED: Download the contents of this blob to a stream. + + This method is deprecated, use :func:`readinto` instead. - :param stream: + :param IO[T] stream: The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. + :param int max_concurrency: + The number of parallel connections with which to download. :returns: The properties of the downloaded blob. :rtype: Any """ @@ -575,6 +924,10 @@ def download_to_stream(self, stream, max_concurrency=1): "download_to_stream is deprecated, use readinto instead", DeprecationWarning ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "download_to_stream is not supported in text mode.") + self._max_concurrency = max_concurrency self.readinto(stream) return self.properties diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_encryption.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_encryption.py new file mode 100644 index 000000000000..42f5c51d0762 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_encryption.py @@ -0,0 +1,1127 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import math +import os +import sys +import warnings +from collections import OrderedDict +from io import BytesIO +from json import ( + dumps, + loads, +) +from typing import Any, Callable, Dict, IO, Optional, Tuple, TYPE_CHECKING +from typing import OrderedDict as TypedOrderedDict +from typing_extensions import Protocol + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.aead import AESGCM +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import CBC +from cryptography.hazmat.primitives.padding import PKCS7 + +from azure.core.exceptions import HttpResponseError +from azure.core.utils import CaseInsensitiveDict + +from ._version import VERSION +from ._shared import decode_base64_to_bytes, encode_base64 + +if TYPE_CHECKING: + from azure.core.pipeline import PipelineResponse + from cryptography.hazmat.primitives.ciphers import AEADEncryptionContext + from cryptography.hazmat.primitives.padding import PaddingContext + + +_ENCRYPTION_PROTOCOL_V1 = '1.0' +_ENCRYPTION_PROTOCOL_V2 = '2.0' +_ENCRYPTION_PROTOCOL_V2_1 = '2.1' +_VALID_ENCRYPTION_PROTOCOLS = [_ENCRYPTION_PROTOCOL_V1, _ENCRYPTION_PROTOCOL_V2, _ENCRYPTION_PROTOCOL_V2_1] +_ENCRYPTION_V2_PROTOCOLS = [_ENCRYPTION_PROTOCOL_V2, _ENCRYPTION_PROTOCOL_V2_1] +_GCM_REGION_DATA_LENGTH = 4 * 1024 * 1024 +_GCM_NONCE_LENGTH = 12 +_GCM_TAG_LENGTH = 16 + +_ERROR_OBJECT_INVALID = \ + '{0} does not define a complete interface. Value of {1} is either missing or invalid.' + +_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( + 'The require_encryption flag is set, but encryption is not supported' + ' for this method.') + + +class KeyEncryptionKey(Protocol): + + def wrap_key(self, key: bytes) -> bytes: + ... + + def unwrap_key(self, key: bytes, algorithm: str) -> bytes: + ... + + def get_kid(self) -> str: + ... + + def get_key_wrap_algorithm(self) -> str: + ... + + +def _validate_not_none(param_name: str, param: Any): + if param is None: + raise ValueError(f'{param_name} should not be None.') + + +def _validate_key_encryption_key_wrap(kek: KeyEncryptionKey): + # Note that None is not callable and so will fail the second clause of each check. + if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key')) + if not hasattr(kek, 'get_kid') or not callable(kek.get_kid): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) + if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm')) + + +class StorageEncryptionMixin(object): + def _configure_encryption(self, kwargs: Dict[str, Any]): + self.require_encryption = kwargs.get("require_encryption", False) + self.encryption_version = kwargs.get("encryption_version", "1.0") + self.key_encryption_key = kwargs.get("key_encryption_key") + self.key_resolver_function = kwargs.get("key_resolver_function") + if self.key_encryption_key and self.encryption_version == '1.0': + warnings.warn("This client has been configured to use encryption with version 1.0. " + + "Version 1.0 is deprecated and no longer considered secure. It is highly " + + "recommended that you switch to using version 2.0. 
The version can be " + + "specified using the 'encryption_version' keyword.") + + +class _EncryptionAlgorithm(object): + """ + Specifies which client encryption algorithm is used. + """ + AES_CBC_256 = 'AES_CBC_256' + AES_GCM_256 = 'AES_GCM_256' + + +class _WrappedContentKey: + """ + Represents the envelope key details stored on the service. + """ + + def __init__(self, algorithm: str, encrypted_key: bytes, key_id: str) -> None: + """ + :param str algorithm: + The algorithm used for wrapping. + :param bytes encrypted_key: + The encrypted content-encryption-key. + :param str key_id: + The key-encryption-key identifier string. + """ + _validate_not_none('algorithm', algorithm) + _validate_not_none('encrypted_key', encrypted_key) + _validate_not_none('key_id', key_id) + + self.algorithm = algorithm + self.encrypted_key = encrypted_key + self.key_id = key_id + + +class _EncryptedRegionInfo: + """ + Represents the length of encryption elements. + This is only used for Encryption V2. + """ + + def __init__(self, data_length: int, nonce_length: int, tag_length: int) -> None: + """ + :param int data_length: + The length of the encryption region data (not including nonce + tag). + :param int nonce_length: + The length of nonce used when encrypting. + :param int tag_length: + The length of the encryption tag. + """ + _validate_not_none('data_length', data_length) + _validate_not_none('nonce_length', nonce_length) + _validate_not_none('tag_length', tag_length) + + self.data_length = data_length + self.nonce_length = nonce_length + self.tag_length = tag_length + + +class _EncryptionAgent: + """ + Represents the encryption agent stored on the service. + It consists of the encryption protocol version and encryption algorithm used. + """ + + def __init__(self, encryption_algorithm: _EncryptionAlgorithm, protocol: str) -> None: + """ + :param _EncryptionAlgorithm encryption_algorithm: + The algorithm used for encrypting the message contents. + :param str protocol: + The protocol version used for encryption. + """ + _validate_not_none('encryption_algorithm', encryption_algorithm) + _validate_not_none('protocol', protocol) + + self.encryption_algorithm = str(encryption_algorithm) + self.protocol = protocol + + +class _EncryptionData: + """ + Represents the encryption data that is stored on the service. + """ + + def __init__( + self, content_encryption_IV: Optional[bytes], + encrypted_region_info: Optional[_EncryptedRegionInfo], + encryption_agent: _EncryptionAgent, + wrapped_content_key: _WrappedContentKey, + key_wrapping_metadata: Dict[str, Any] + ) -> None: + """ + :param Optional[bytes] content_encryption_IV: + The content encryption initialization vector. + Required for AES-CBC (V1). + :param Optional[_EncryptedRegionInfo] encrypted_region_info: + The info about the authenticated block sizes. + Required for AES-GCM (V2). + :param _EncryptionAgent encryption_agent: + The encryption agent. + :param _WrappedContentKey wrapped_content_key: + An object that stores the wrapping algorithm, the key identifier, + and the encrypted key bytes. + :param Dict[str, Any] key_wrapping_metadata: + A dict containing metadata related to the key wrapping. 
+ """ + _validate_not_none('encryption_agent', encryption_agent) + _validate_not_none('wrapped_content_key', wrapped_content_key) + + # Validate we have the right matching optional parameter for the specified algorithm + if encryption_agent.encryption_algorithm == _EncryptionAlgorithm.AES_CBC_256: + _validate_not_none('content_encryption_IV', content_encryption_IV) + elif encryption_agent.encryption_algorithm == _EncryptionAlgorithm.AES_GCM_256: + _validate_not_none('encrypted_region_info', encrypted_region_info) + else: + raise ValueError("Invalid encryption algorithm.") + + self.content_encryption_IV = content_encryption_IV + self.encrypted_region_info = encrypted_region_info + self.encryption_agent = encryption_agent + self.wrapped_content_key = wrapped_content_key + self.key_wrapping_metadata = key_wrapping_metadata + + +class GCMBlobEncryptionStream: + """ + A stream that performs AES-GCM encryption on the given data as + it's streamed. Data is read and encrypted in regions. The stream + will use the same encryption key and will generate a guaranteed unique + nonce for each encryption region. + """ + def __init__( + self, content_encryption_key: bytes, + data_stream: IO[bytes], + ) -> None: + """ + :param bytes content_encryption_key: The encryption key to use. + :param IO[bytes] data_stream: The data stream to read data from. + """ + self.content_encryption_key = content_encryption_key + self.data_stream = data_stream + + self.offset = 0 + self.current = b'' + self.nonce_counter = 0 + + def read(self, size: int = -1) -> bytes: + """ + Read data from the stream. Specify -1 to read all available data. + + :param int size: The amount of data to read. Defaults to -1 for all data. + :return: The bytes read. + :rtype: bytes + """ + result = BytesIO() + remaining = sys.maxsize if size == -1 else size + + while remaining > 0: + # Start by reading from current + if len(self.current) > 0: + read = min(remaining, len(self.current)) + result.write(self.current[:read]) + + self.current = self.current[read:] + self.offset += read + remaining -= read + + if remaining > 0: + # Read one region of data and encrypt it + data = self.data_stream.read(_GCM_REGION_DATA_LENGTH) + if len(data) == 0: + # No more data to read + break + + self.current = encrypt_data_v2(data, self.nonce_counter, self.content_encryption_key) + # IMPORTANT: Must increment the nonce each time. + self.nonce_counter += 1 + + return result.getvalue() + + +def encrypt_data_v2(data: bytes, nonce: int, key: bytes) -> bytes: + """ + Encrypts the given data using the given nonce and key using AES-GCM. + The result includes the data in the form: nonce + ciphertext + tag. + + :param bytes data: The raw data to encrypt. + :param int nonce: The nonce to use for encryption. + :param bytes key: The encryption key to use for encryption. + :return: The encrypted bytes in the form: nonce + ciphertext + tag. + :rtype: bytes + """ + nonce_bytes = nonce.to_bytes(_GCM_NONCE_LENGTH, 'big') + aesgcm = AESGCM(key) + + # Returns ciphertext + tag + ciphertext_with_tag = aesgcm.encrypt(nonce_bytes, data, None) + return nonce_bytes + ciphertext_with_tag + + +def is_encryption_v2(encryption_data: Optional[_EncryptionData]) -> bool: + """ + Determine whether the given encryption data signifies version 2.0 or 2.1. + + :param Optional[_EncryptionData] encryption_data: The encryption data. Will return False if this is None. + :return: True, if the encryption data indicates encryption V2, false otherwise. 
+
+
+def is_encryption_v2(encryption_data: Optional[_EncryptionData]) -> bool:
+    """
+    Determine whether the given encryption data signifies version 2.0 or 2.1.
+
+    :param Optional[_EncryptionData] encryption_data: The encryption data. Will return False if this is None.
+    :return: True if the encryption data indicates encryption V2, False otherwise.
+    :rtype: bool
+    """
+    # If encryption_data is None, assume no encryption
+    return bool(encryption_data and (encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS))
+
+
+def modify_user_agent_for_encryption(
+        user_agent: str,
+        moniker: str,
+        encryption_version: str,
+        request_options: Dict[str, Any]
+    ) -> None:
+    """
+    Modifies the request options to contain a user agent string updated with encryption information.
+    Adds azstorage-clientsideencryption/<version> immediately preceding the SDK descriptor.
+
+    :param str user_agent: The existing User Agent to modify.
+    :param str moniker: The specific SDK moniker. The modification will immediately precede azsdk-python-{moniker}.
+    :param str encryption_version: The version of encryption being used.
+    :param Dict[str, Any] request_options: The request options to add the user agent override to.
+    """
+    # If the user has specified user_agent_overwrite=True, don't make any modifications
+    if request_options.get('user_agent_overwrite'):
+        return
+
+    # If the feature flag is already present, don't add it again
+    feature_flag = f"azstorage-clientsideencryption/{encryption_version}"
+    if feature_flag in user_agent:
+        return
+
+    index = user_agent.find(f"azsdk-python-{moniker}")
+    user_agent = f"{user_agent[:index]}{feature_flag} {user_agent[index:]}"
+    # Since we are using user_agent_overwrite=True, we must prepend the user's user_agent if there is one
+    if request_options.get('user_agent'):
+        user_agent = f"{request_options.get('user_agent')} {user_agent}"
+
+    request_options['user_agent'] = user_agent
+    request_options['user_agent_overwrite'] = True
+
+
+def get_adjusted_upload_size(length: int, encryption_version: str) -> int:
+    """
+    Get the adjusted size of the blob upload which accounts for
+    extra encryption data (padding OR nonce + tag).
+
+    :param int length: The plaintext data length.
+    :param str encryption_version: The version of encryption being used.
+    :return: The new upload size to use.
+    :rtype: int
+    """
+    if encryption_version == _ENCRYPTION_PROTOCOL_V1:
+        return length + (16 - (length % 16))
+
+    if encryption_version == _ENCRYPTION_PROTOCOL_V2:
+        encryption_data_length = _GCM_NONCE_LENGTH + _GCM_TAG_LENGTH
+        regions = math.ceil(length / _GCM_REGION_DATA_LENGTH)
+        return length + (regions * encryption_data_length)
+
+    raise ValueError("Invalid encryption version specified.")
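+
+# Worked numbers for the size adjustment above (a sketch, assuming
+# _GCM_NONCE_LENGTH == 12 and _GCM_TAG_LENGTH == 16): a 100-byte V1 upload
+# is padded to the next 16-byte boundary, while a 100-byte V2 upload fits in
+# a single region and grows by one nonce + tag:
+#
+#     assert get_adjusted_upload_size(100, _ENCRYPTION_PROTOCOL_V1) == 112
+#     assert get_adjusted_upload_size(100, _ENCRYPTION_PROTOCOL_V2) == 128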
+
+
+def get_adjusted_download_range_and_offset(
+        start: int,
+        end: int,
+        length: Optional[int],
+        encryption_data: Optional[_EncryptionData]) -> Tuple[Tuple[int, int], Tuple[int, int]]:
+    """
+    Gets the new download range and offsets into the decrypted data for
+    the given user-specified range. The new download range will include all
+    the data needed to decrypt the user-provided range and will include only
+    full encryption regions.
+
+    The offsets returned will be the offsets needed to fetch the user-requested
+    data out of the full decrypted data. The end offset is different based on the
+    encryption version. For V1, the end offset is offset from the end whereas for
+    V2, the end offset is the ending index into the stream.
+    V1: decrypted_data[start_offset : len(decrypted_data) - end_offset]
+    V2: decrypted_data[start_offset : end_offset]
+
+    :param int start: The user-requested start index.
+    :param int end: The user-requested end index.
+    :param Optional[int] length: The user-requested length. Only used for V1.
+    :param Optional[_EncryptionData] encryption_data: The encryption data to determine version and sizes.
+    :return: (new start, new end), (start offset, end offset)
+    :rtype: Tuple[Tuple[int, int], Tuple[int, int]]
+    """
+    start_offset, end_offset = 0, 0
+    if encryption_data is None:
+        return (start, end), (start_offset, end_offset)
+
+    if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1:
+        if start is not None:
+            # Align the start of the range along a 16 byte block
+            start_offset = start % 16
+            start -= start_offset
+
+            # Include an extra 16 bytes for the IV if necessary
+            # Because of the previous offsetting, start_range will always
+            # be a multiple of 16.
+            if start > 0:
+                start_offset += 16
+                start -= 16
+
+        if length is not None:
+            # Align the end of the range along a 16 byte block
+            end_offset = 15 - (end % 16)
+            end += end_offset
+
+    elif encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS:
+        start_offset, end_offset = 0, end
+
+        if encryption_data.encrypted_region_info is None:
+            raise ValueError("Missing required metadata for Encryption V2")
+
+        nonce_length = encryption_data.encrypted_region_info.nonce_length
+        data_length = encryption_data.encrypted_region_info.data_length
+        tag_length = encryption_data.encrypted_region_info.tag_length
+        region_length = nonce_length + data_length + tag_length
+        requested_length = end - start
+
+        if start is not None:
+            # Find which data region the start is in
+            region_num = start // data_length
+            # The start of the data region is different from the start of the encryption region
+            data_start = region_num * data_length
+            region_start = region_num * region_length
+            # Offset is based on data region
+            start_offset = start - data_start
+            # New start is the start of the encryption region
+            start = region_start
+
+        if end is not None:
+            # Find which data region the end is in
+            region_num = end // data_length
+            end_offset = start_offset + requested_length + 1
+            # New end is the end of the encryption region
+            end = (region_num * region_length) + region_length - 1
+
+    return (start, end), (start_offset, end_offset)
+
+
+def parse_encryption_data(metadata: Dict[str, Any]) -> Optional[_EncryptionData]:
+    """
+    Parses the encryption data out of the given blob metadata. If metadata does
+    not exist or there are parsing errors, this function will just return None.
+
+    :param Dict[str, Any] metadata: The blob metadata parsed from the response.
+    :return: The encryption data or None
+    :rtype: Optional[_EncryptionData]
+    """
+    try:
+        # Use case insensitive dict as key needs to be case-insensitive
+        case_insensitive_metadata = CaseInsensitiveDict(metadata)
+        return _dict_to_encryption_data(loads(case_insensitive_metadata['encryptiondata']))
+    except:  # pylint: disable=bare-except
+        return None
+
+
+def adjust_blob_size_for_encryption(size: int, encryption_data: Optional[_EncryptionData]) -> int:
+    """
+    Adjusts the given blob size for encryption by subtracting the size of
+    the encryption data (nonce + tag). This only has an effect for encryption V2.
+
+    :param int size: The original blob size.
+    :param Optional[_EncryptionData] encryption_data: The encryption data to determine version and sizes.
+    :return: The new blob size.
+ :rtype: int + """ + if (encryption_data is not None and + encryption_data.encrypted_region_info is not None and + is_encryption_v2(encryption_data)): + + nonce_length = encryption_data.encrypted_region_info.nonce_length + data_length = encryption_data.encrypted_region_info.data_length + tag_length = encryption_data.encrypted_region_info.tag_length + region_length = nonce_length + data_length + tag_length + + num_regions = math.ceil(size / region_length) + metadata_size = num_regions * (nonce_length + tag_length) + return size - metadata_size + + return size + + +def _generate_encryption_data_dict( + kek: KeyEncryptionKey, + cek: bytes, + iv: Optional[bytes], + version: str + ) -> TypedOrderedDict[str, Any]: + """ + Generates and returns the encryption metadata as a dict. + + :param KeyEncryptionKey kek: The key encryption key. See calling functions for more information. + :param bytes cek: The content encryption key. + :param Optional[bytes] iv: The initialization vector. Only required for AES-CBC. + :param str version: The client encryption version used. + :return: A dict containing all the encryption metadata. + :rtype: Dict[str, Any] + """ + # Encrypt the cek. + if version == _ENCRYPTION_PROTOCOL_V1: + wrapped_cek = kek.wrap_key(cek) + # For V2, we include the encryption version in the wrapped key. + elif version == _ENCRYPTION_PROTOCOL_V2: + # We must pad the version to 8 bytes for AES Keywrap algorithms + to_wrap = _ENCRYPTION_PROTOCOL_V2.encode().ljust(8, b'\0') + cek + wrapped_cek = kek.wrap_key(to_wrap) + else: + raise ValueError("Invalid encryption version specified.") + + # Build the encryption_data dict. + # Use OrderedDict to comply with Java's ordering requirement. + wrapped_content_key = OrderedDict() + wrapped_content_key['KeyId'] = kek.get_kid() + wrapped_content_key['EncryptedKey'] = encode_base64(wrapped_cek) + wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm() + + encryption_agent = OrderedDict() + encryption_agent['Protocol'] = version + + if version == _ENCRYPTION_PROTOCOL_V1: + encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256 + + elif version == _ENCRYPTION_PROTOCOL_V2: + encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_GCM_256 + + encrypted_region_info = OrderedDict() + encrypted_region_info['DataLength'] = _GCM_REGION_DATA_LENGTH + encrypted_region_info['NonceLength'] = _GCM_NONCE_LENGTH + + encryption_data_dict: TypedOrderedDict[str, Any] = OrderedDict() + encryption_data_dict['WrappedContentKey'] = wrapped_content_key + encryption_data_dict['EncryptionAgent'] = encryption_agent + if version == _ENCRYPTION_PROTOCOL_V1: + encryption_data_dict['ContentEncryptionIV'] = encode_base64(iv) + elif version == _ENCRYPTION_PROTOCOL_V2: + encryption_data_dict['EncryptedRegionInfo'] = encrypted_region_info + encryption_data_dict['KeyWrappingMetadata'] = OrderedDict({'EncryptionLibrary': 'Python ' + VERSION}) + + return encryption_data_dict + + +def _dict_to_encryption_data(encryption_data_dict: Dict[str, Any]) -> _EncryptionData: + """ + Converts the specified dictionary to an EncryptionData object for + eventual use in decryption. + + :param dict encryption_data_dict: + The dictionary containing the encryption data. + :return: an _EncryptionData object built from the dictionary. 
+    :rtype: _EncryptionData
+    """
+    try:
+        protocol = encryption_data_dict['EncryptionAgent']['Protocol']
+        if protocol not in _VALID_ENCRYPTION_PROTOCOLS:
+            raise ValueError("Unsupported encryption version.")
+    except KeyError as exc:
+        raise ValueError("Unsupported encryption version.") from exc
+    wrapped_content_key = encryption_data_dict['WrappedContentKey']
+    wrapped_content_key = _WrappedContentKey(wrapped_content_key['Algorithm'],
+                                             decode_base64_to_bytes(wrapped_content_key['EncryptedKey']),
+                                             wrapped_content_key['KeyId'])
+
+    encryption_agent = encryption_data_dict['EncryptionAgent']
+    encryption_agent = _EncryptionAgent(encryption_agent['EncryptionAlgorithm'],
+                                        encryption_agent['Protocol'])
+
+    if 'KeyWrappingMetadata' in encryption_data_dict:
+        key_wrapping_metadata = encryption_data_dict['KeyWrappingMetadata']
+    else:
+        key_wrapping_metadata = None
+
+    # AES-CBC only
+    encryption_iv = None
+    if 'ContentEncryptionIV' in encryption_data_dict:
+        encryption_iv = decode_base64_to_bytes(encryption_data_dict['ContentEncryptionIV'])
+
+    # AES-GCM only
+    region_info = None
+    if 'EncryptedRegionInfo' in encryption_data_dict:
+        encrypted_region_info = encryption_data_dict['EncryptedRegionInfo']
+        region_info = _EncryptedRegionInfo(encrypted_region_info['DataLength'],
+                                           encrypted_region_info['NonceLength'],
+                                           _GCM_TAG_LENGTH)
+
+    encryption_data = _EncryptionData(encryption_iv,
+                                      region_info,
+                                      encryption_agent,
+                                      wrapped_content_key,
+                                      key_wrapping_metadata)
+
+    return encryption_data
+
+
+def _generate_AES_CBC_cipher(cek: bytes, iv: bytes) -> Cipher:
+    """
+    Generates and returns an encryption cipher for AES CBC using the given cek and iv.
+
+    :param bytes cek: The content encryption key for the cipher.
+    :param bytes iv: The initialization vector for the cipher.
+    :return: A cipher for encrypting in AES256 CBC.
+    :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher
+    """
+
+    backend = default_backend()
+    algorithm = AES(cek)
+    mode = CBC(iv)
+    return Cipher(algorithm, mode, backend)
+
+
+def _validate_and_unwrap_cek(
+    encryption_data: _EncryptionData,
+    key_encryption_key: Optional[KeyEncryptionKey] = None,
+    key_resolver: Optional[Callable[[str], KeyEncryptionKey]] = None
+) -> bytes:
+    """
+    Extracts and returns the content_encryption_key stored in the encryption_data object
+    and performs necessary validation on all parameters.
+    :param _EncryptionData encryption_data:
+        The encryption metadata of the retrieved value.
+    :param Optional[KeyEncryptionKey] key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)
+            - Wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()
+            - Returns the algorithm used to wrap the specified symmetric key.
+        get_kid()
+            - Returns a string key id for this key-encryption-key.
+    :param Optional[Callable[[str], KeyEncryptionKey]] key_resolver:
+        A function that, given a key_id, will return a key_encryption_key. Please refer
+        to high-level service object instance variables for more details.
+    :return: The content_encryption_key stored in the encryption_data object.
+    :rtype: bytes
+    """
+
+    _validate_not_none('encrypted_key', encryption_data.wrapped_content_key.encrypted_key)
+
+    # Validate we have the right info for the specified version
+    if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1:
+        _validate_not_none('content_encryption_IV', encryption_data.content_encryption_IV)
+    elif encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS:
+        _validate_not_none('encrypted_region_info', encryption_data.encrypted_region_info)
+    else:
+        raise ValueError('Specified encryption version is not supported.')
+
+    content_encryption_key: Optional[bytes] = None
+
+    # If the resolver exists, give priority to the key it finds.
+    if key_resolver is not None:
+        key_encryption_key = key_resolver(encryption_data.wrapped_content_key.key_id)
+
+    if key_encryption_key is None:
+        raise ValueError("Unable to decrypt. key_resolver and key_encryption_key cannot both be None.")
+    if not hasattr(key_encryption_key, 'get_kid') or not callable(key_encryption_key.get_kid):
+        raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid'))
+    if not hasattr(key_encryption_key, 'unwrap_key') or not callable(key_encryption_key.unwrap_key):
+        raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key'))
+    if encryption_data.wrapped_content_key.key_id != key_encryption_key.get_kid():
+        raise ValueError('Provided or resolved key-encryption-key does not match the id of the key used to encrypt.')
+    # Will throw an exception if the specified algorithm is not supported.
+    content_encryption_key = key_encryption_key.unwrap_key(
+        encryption_data.wrapped_content_key.encrypted_key,
+        encryption_data.wrapped_content_key.algorithm)
+
+    # For V2, the version is included with the cek. We need to validate it
+    # and remove it from the actual cek.
+    if encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS:
+        version_2_bytes = encryption_data.encryption_agent.protocol.encode().ljust(8, b'\0')
+        cek_version_bytes = content_encryption_key[:len(version_2_bytes)]
+        if cek_version_bytes != version_2_bytes:
+            raise ValueError('The encryption metadata is not valid and may have been modified.')
+
+        # Remove version from the start of the cek.
+        content_encryption_key = content_encryption_key[len(version_2_bytes):]
+
+    _validate_not_none('content_encryption_key', content_encryption_key)
+
+    return content_encryption_key
+
+
+def _decrypt_message(
+    message: bytes,
+    encryption_data: _EncryptionData,
+    key_encryption_key: Optional[KeyEncryptionKey] = None,
+    resolver: Optional[Callable[[str], KeyEncryptionKey]] = None
+) -> bytes:
+    """
+    Decrypts the given ciphertext using AES256 in CBC mode with 128 bit padding.
+    Unwraps the content-encryption-key using the user-provided or resolved key-encryption-key (kek).
+    Returns the original plaintext.
+
+    :param bytes message:
+        The ciphertext to be decrypted.
+    :param _EncryptionData encryption_data:
+        The metadata associated with this ciphertext.
+    :param Optional[KeyEncryptionKey] key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)
+            - Wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()
+            - Returns the algorithm used to wrap the specified symmetric key.
+        get_kid()
+            - Returns a string key id for this key-encryption-key.
+    :param Optional[Callable[[str], KeyEncryptionKey]] resolver:
+        The user-provided key resolver.
Uses the kid string to return a key-encryption-key + implementing the interface defined above. + :return: The decrypted plaintext. + :rtype: bytes + """ + _validate_not_none('message', message) + content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, resolver) + + if encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1: + if not encryption_data.content_encryption_IV: + raise ValueError("Missing required metadata for decryption.") + + cipher = _generate_AES_CBC_cipher(content_encryption_key, encryption_data.content_encryption_IV) + + # decrypt data + decryptor = cipher.decryptor() + decrypted_data = (decryptor.update(message) + decryptor.finalize()) + + # unpad data + unpadder = PKCS7(128).unpadder() + decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize()) + + elif encryption_data.encryption_agent.protocol in _ENCRYPTION_V2_PROTOCOLS: + block_info = encryption_data.encrypted_region_info + if not block_info or not block_info.nonce_length: + raise ValueError("Missing required metadata for decryption.") + + if encryption_data.encrypted_region_info is None: + raise ValueError("Missing required metadata for Encryption V2") + + nonce_length = int(encryption_data.encrypted_region_info.nonce_length) + + # First bytes are the nonce + nonce = message[:nonce_length] + ciphertext_with_tag = message[nonce_length:] + + aesgcm = AESGCM(content_encryption_key) + decrypted_data = aesgcm.decrypt(nonce, ciphertext_with_tag, None) + + else: + raise ValueError('Specified encryption version is not supported.') + + return decrypted_data + + +def encrypt_blob(blob: bytes, key_encryption_key: KeyEncryptionKey, version: str) -> Tuple[str, bytes]: + """ + Encrypts the given blob using the given encryption protocol version. + Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek). + Returns a json-formatted string containing the encryption metadata. This method should + only be used when a blob is small enough for single shot upload. Encrypting larger blobs + is done as a part of the upload_data_chunks method. + + :param bytes blob: + The blob to be encrypted. + :param KeyEncryptionKey key_encryption_key: + The user-provided key-encryption-key. Must implement the following methods: + wrap_key(key) + - Wraps the specified key using an algorithm of the user's choice. + get_key_wrap_algorithm() + - Returns the algorithm used to wrap the specified symmetric key. + get_kid() + - Returns a string key id for this key-encryption-key. + :param str version: The client encryption version to use. + :return: A tuple of json-formatted string containing the encryption metadata and the encrypted blob data. + :rtype: (str, bytes) + """ + + _validate_not_none('blob', blob) + _validate_not_none('key_encryption_key', key_encryption_key) + _validate_key_encryption_key_wrap(key_encryption_key) + + if version == _ENCRYPTION_PROTOCOL_V1: + # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks + content_encryption_key = os.urandom(32) + initialization_vector = os.urandom(16) + + cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector) + + # PKCS7 with 16 byte blocks ensures compatibility with AES. + padder = PKCS7(128).padder() + padded_data = padder.update(blob) + padder.finalize() + + # Encrypt the data. 
+        encryptor = cipher.encryptor()
+        encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
+
+    elif version == _ENCRYPTION_PROTOCOL_V2:
+        # AES256 GCM uses 256 bit (32 byte) keys and a 12 byte nonce.
+        content_encryption_key = os.urandom(32)
+        initialization_vector = None
+
+        data = BytesIO(blob)
+        encryption_stream = GCMBlobEncryptionStream(content_encryption_key, data)
+
+        encrypted_data = encryption_stream.read()
+
+    else:
+        raise ValueError("Invalid encryption version specified.")
+
+    encryption_data = _generate_encryption_data_dict(key_encryption_key, content_encryption_key,
+                                                     initialization_vector, version)
+    encryption_data['EncryptionMode'] = 'FullBlob'
+
+    return dumps(encryption_data), encrypted_data
+
+
+def generate_blob_encryption_data(
+    key_encryption_key: Optional[KeyEncryptionKey],
+    version: str
+) -> Tuple[Optional[bytes], Optional[bytes], Optional[str]]:
+    """
+    Generates the encryption_metadata for the blob.
+
+    :param Optional[KeyEncryptionKey] key_encryption_key:
+        The key-encryption-key used to wrap the cek associated with this blob.
+    :param str version: The client encryption version to use.
+    :return: A tuple containing the cek and iv for this blob as well as the
+        serialized encryption metadata for the blob.
+    :rtype: (Optional[bytes], Optional[bytes], Optional[str])
+    """
+
+    encryption_data = None
+    content_encryption_key = None
+    initialization_vector = None
+    if key_encryption_key:
+        _validate_key_encryption_key_wrap(key_encryption_key)
+        content_encryption_key = os.urandom(32)
+        # Initialization vector only needed for V1
+        if version == _ENCRYPTION_PROTOCOL_V1:
+            initialization_vector = os.urandom(16)
+        encryption_data_dict = _generate_encryption_data_dict(key_encryption_key,
+                                                              content_encryption_key,
+                                                              initialization_vector,
+                                                              version)
+        encryption_data_dict['EncryptionMode'] = 'FullBlob'
+        encryption_data = dumps(encryption_data_dict)
+
+    return content_encryption_key, initialization_vector, encryption_data
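+
+# For orientation, the serialized 'encryptiondata' produced above has this
+# shape for V2 (a sketch; field values depend on the KEK, and DataLength
+# assumes a 4 MiB _GCM_REGION_DATA_LENGTH):
+#
+#     {
+#         "WrappedContentKey": {"KeyId": "...", "EncryptedKey": "...", "Algorithm": "..."},
+#         "EncryptionAgent": {"Protocol": "2.0", "EncryptionAlgorithm": "AES_GCM_256"},
+#         "EncryptedRegionInfo": {"DataLength": 4194304, "NonceLength": 12},
+#         "KeyWrappingMetadata": {"EncryptionLibrary": "Python ..."},
+#         "EncryptionMode": "FullBlob"
+#     }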
+
+
+def decrypt_blob(  # pylint: disable=too-many-locals,too-many-statements
+    require_encryption: bool,
+    key_encryption_key: Optional[KeyEncryptionKey],
+    key_resolver: Optional[Callable[[str], KeyEncryptionKey]],
+    content: bytes,
+    start_offset: int,
+    end_offset: int,
+    response_headers: Dict[str, Any]
+) -> bytes:
+    """
+    Decrypts the given blob contents and returns only the requested range.
+
+    :param bool require_encryption:
+        Whether the calling blob service requires objects to be decrypted.
+    :param Optional[KeyEncryptionKey] key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)
+            - Wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()
+            - Returns the algorithm used to wrap the specified symmetric key.
+        get_kid()
+            - Returns a string key id for this key-encryption-key.
+    :param key_resolver:
+        The user-provided key resolver. Uses the kid string to return a key-encryption-key
+        implementing the interface defined above.
+    :type key_resolver: Optional[Callable[[str], KeyEncryptionKey]]
+    :param bytes content:
+        The encrypted blob content.
+    :param int start_offset:
+        The adjusted offset from the beginning of the *decrypted* content for the caller's data.
+    :param int end_offset:
+        The adjusted offset from the end of the *decrypted* content for the caller's data.
+    :param Dict[str, Any] response_headers:
+        A dictionary of response headers from the download request. Expected to include the
+        'x-ms-meta-encryptiondata' header if the blob was encrypted.
+    :return: The decrypted blob content.
+    :rtype: bytes
+    """
+    try:
+        encryption_data = _dict_to_encryption_data(loads(response_headers['x-ms-meta-encryptiondata']))
+    except Exception as exc:  # pylint: disable=broad-except
+        if require_encryption:
+            raise ValueError(
+                'Encryption required, but received data does not contain appropriate metadata. ' + \
+                'Data was either not encrypted or metadata has been lost.') from exc
+
+        return content
+
+    algorithm = encryption_data.encryption_agent.encryption_algorithm
+    if algorithm not in (_EncryptionAlgorithm.AES_CBC_256, _EncryptionAlgorithm.AES_GCM_256):
+        raise ValueError('Specified encryption algorithm is not supported.')
+
+    version = encryption_data.encryption_agent.protocol
+    if version not in _VALID_ENCRYPTION_PROTOCOLS:
+        raise ValueError('Specified encryption version is not supported.')
+
+    content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, key_resolver)
+
+    if version == _ENCRYPTION_PROTOCOL_V1:
+        blob_type = response_headers['x-ms-blob-type']
+
+        iv: Optional[bytes] = None
+        unpad = False
+        if 'content-range' in response_headers:
+            content_range = response_headers['content-range']
+            # Format: 'bytes x-y/size'
+
+            # Ignore the word 'bytes'
+            content_range = content_range.split(' ')
+
+            content_range = content_range[1].split('-')
+            content_range = content_range[1].split('/')
+            end_range = int(content_range[0])
+            blob_size = int(content_range[1])
+
+            if start_offset >= 16:
+                iv = content[:16]
+                content = content[16:]
+                start_offset -= 16
+            else:
+                iv = encryption_data.content_encryption_IV
+
+            if end_range == blob_size - 1:
+                unpad = True
+        else:
+            unpad = True
+            iv = encryption_data.content_encryption_IV
+
+        if blob_type == 'PageBlob':
+            unpad = False
+
+        if iv is None:
+            raise ValueError("Missing required metadata for Encryption V1")
+
+        cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
+        decryptor = cipher.decryptor()
+
+        content = decryptor.update(content) + decryptor.finalize()
+        if unpad:
+            unpadder = PKCS7(128).unpadder()
+            content = unpadder.update(content) + unpadder.finalize()
+
+        return content[start_offset: len(content) - end_offset]
+
+    if version in _ENCRYPTION_V2_PROTOCOLS:
+        # We assume the content contains only full encryption regions
+        total_size = len(content)
+        offset = 0
+
+        if encryption_data.encrypted_region_info is None:
+            raise ValueError("Missing required metadata for Encryption V2")
+
+        nonce_length = encryption_data.encrypted_region_info.nonce_length
+        data_length = encryption_data.encrypted_region_info.data_length
+        tag_length = encryption_data.encrypted_region_info.tag_length
+        region_length = nonce_length + data_length + tag_length
+
+        decrypted_content = bytearray()
+        while offset < total_size:
+            # Process one encryption region at a time
+            process_size = min(region_length, total_size)
+            encrypted_region = content[offset:offset + process_size]
+
+            # First bytes are the nonce
+            nonce = encrypted_region[:nonce_length]
+            ciphertext_with_tag = encrypted_region[nonce_length:]
+
+            aesgcm = AESGCM(content_encryption_key)
+            decrypted_data = aesgcm.decrypt(nonce, ciphertext_with_tag, None)
+            decrypted_content.extend(decrypted_data)
+
+            offset += process_size
+
+        # Read the caller requested data from the decrypted content
+        return decrypted_content[start_offset:end_offset]
+
+    raise ValueError('Specified encryption version is not supported.')
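+
+# End-to-end sketch using a toy key-encryption-key that implements the
+# interface described in the docstrings above. Illustration only: the
+# identity "wrap" is not a real key-wrapping algorithm.
+#
+#     class _ToyKEK:
+#         def get_kid(self): return 'local:toy'
+#         def get_key_wrap_algorithm(self): return 'identity'
+#         def wrap_key(self, key): return key
+#         def unwrap_key(self, key, algorithm): return key
+#
+#     meta, sealed = encrypt_blob(b'hello', _ToyKEK(), _ENCRYPTION_PROTOCOL_V2)
+#     headers = {'x-ms-meta-encryptiondata': meta}
+#     # For V2, end_offset is the exclusive end index into the plaintext.
+#     assert decrypt_blob(True, _ToyKEK(), None, sealed, 0, 5, headers) == b'hello'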
+
+
+def get_blob_encryptor_and_padder(
+    cek: Optional[bytes],
+    iv: Optional[bytes],
+    should_pad: bool
+) -> Tuple[Optional["AEADEncryptionContext"], Optional["PaddingContext"]]:
+    encryptor = None
+    padder = None
+
+    if cek is not None and iv is not None:
+        cipher = _generate_AES_CBC_cipher(cek, iv)
+        encryptor = cipher.encryptor()
+        padder = PKCS7(128).padder() if should_pad else None
+
+    return encryptor, padder
+
+
+def encrypt_queue_message(message: str, key_encryption_key: KeyEncryptionKey, version: str) -> str:
+    """
+    Encrypts the given plain text message using the given protocol version.
+    Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek).
+    Returns a json-formatted string containing the encrypted message and the encryption metadata.
+
+    :param str message:
+        The plain text message to be encrypted.
+    :param KeyEncryptionKey key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)
+            - Wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()
+            - Returns the algorithm used to wrap the specified symmetric key.
+        get_kid()
+            - Returns a string key id for this key-encryption-key.
+    :param str version: The client encryption version to use.
+    :return: A json-formatted string containing the encrypted message and the encryption metadata.
+    :rtype: str
+    """
+
+    _validate_not_none('message', message)
+    _validate_not_none('key_encryption_key', key_encryption_key)
+    _validate_key_encryption_key_wrap(key_encryption_key)
+
+    # Queue encoding functions all return unicode strings, and encryption should
+    # operate on binary strings.
+    message_as_bytes: bytes = message.encode('utf-8')
+
+    if version == _ENCRYPTION_PROTOCOL_V1:
+        # AES256 CBC uses 256 bit (32 byte) keys and always with 16 byte blocks
+        content_encryption_key = os.urandom(32)
+        initialization_vector = os.urandom(16)
+
+        cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector)
+
+        # PKCS7 with 16 byte blocks ensures compatibility with AES.
+        padder = PKCS7(128).padder()
+        padded_data = padder.update(message_as_bytes) + padder.finalize()
+
+        # Encrypt the data.
+        encryptor = cipher.encryptor()
+        encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
+
+    elif version == _ENCRYPTION_PROTOCOL_V2:
+        # AES256 GCM uses 256 bit (32 byte) keys and a 12 byte nonce.
+        content_encryption_key = os.urandom(32)
+        initialization_vector = None
+
+        # The nonce MUST be different for each key
+        nonce = os.urandom(12)
+        aesgcm = AESGCM(content_encryption_key)
+
+        # Returns ciphertext + tag
+        ciphertext_with_tag = aesgcm.encrypt(nonce, message_as_bytes, None)
+        encrypted_data = nonce + ciphertext_with_tag
+
+    else:
+        raise ValueError("Invalid encryption version specified.")
+
+    # Build the dictionary structure.
+    queue_message = {'EncryptedMessageContents': encode_base64(encrypted_data),
+                     'EncryptionData': _generate_encryption_data_dict(key_encryption_key,
+                                                                      content_encryption_key,
+                                                                      initialization_vector,
+                                                                      version)}
+
+    return dumps(queue_message)
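+
+# The resulting payload is a JSON envelope of the form (a sketch; values are
+# produced by the helpers above):
+#
+#     {
+#         "EncryptedMessageContents": "<base64-encoded encrypted message>",
+#         "EncryptionData": {"WrappedContentKey": {...}, "EncryptionAgent": {...}, ...}
+#     }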
+
+
+def decrypt_queue_message(
+    message: str,
+    response: "PipelineResponse",
+    require_encryption: bool,
+    key_encryption_key: Optional[KeyEncryptionKey],
+    resolver: Optional[Callable[[str], KeyEncryptionKey]]
+) -> str:
+    """
+    Returns the decrypted message contents from an EncryptedQueueMessage.
+    If no encryption metadata is present, will return the unaltered message.
+    :param str message:
+        The JSON formatted QueueEncryptedMessage contents with all associated metadata.
+    :param ~azure.core.pipeline.PipelineResponse response:
+        The pipeline response used to generate an error with.
+    :param bool require_encryption:
+        If set, will enforce that the retrieved messages are encrypted and decrypt them.
+    :param Optional[KeyEncryptionKey] key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)
+            - Wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()
+            - Returns the algorithm used to wrap the specified symmetric key.
+        get_kid()
+            - Returns a string key id for this key-encryption-key.
+    :param Optional[Callable[[str], KeyEncryptionKey]] resolver:
+        The user-provided key resolver. Uses the kid string to return a key-encryption-key
+        implementing the interface defined above.
+    :return: The plain text message from the queue message.
+    :rtype: str
+    """
+    response = response.http_response
+
+    try:
+        deserialized_message: Dict[str, Any] = loads(message)
+
+        encryption_data = _dict_to_encryption_data(deserialized_message['EncryptionData'])
+        decoded_data = decode_base64_to_bytes(deserialized_message['EncryptedMessageContents'])
+    except (KeyError, ValueError) as exc:
+        # Message was not json formatted and so was not encrypted
+        # or the user provided a json formatted message
+        # or the metadata was malformed.
+        if require_encryption:
+            raise ValueError(
+                'Encryption required, but received message does not contain appropriate metadata. ' + \
+                'Message was either not encrypted or metadata was incorrect.') from exc
+
+        return message
+    try:
+        return _decrypt_message(decoded_data, encryption_data, key_encryption_key, resolver).decode('utf-8')
+    except Exception as error:
+        raise HttpResponseError(
+            message="Decryption failed.",
+            response=response, #type: ignore [arg-type]
+            error=error) from error
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/__init__.py
index cc760e7efd22..62dc43a7722a 100644
--- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/__init__.py
+++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/__init__.py
@@ -7,10 +7,17 @@
 # --------------------------------------------------------------------------
 from ._azure_blob_storage import AzureBlobStorage
-__all__ = ['AzureBlobStorage']
 
 try:
-    from ._patch import patch_sdk  # type: ignore
-    patch_sdk()
+    from ._patch import __all__ as _patch_all
+    from ._patch import *  # pylint: disable=unused-wildcard-import
 except ImportError:
-    pass
+    _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "AzureBlobStorage",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+
+_patch_sdk()
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_azure_blob_storage.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_azure_blob_storage.py
index dff7e12276c2..a429b713b744 100644
--- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_azure_blob_storage.py
+++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_azure_blob_storage.py @@ -6,35 +6,34 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from copy import deepcopy +from typing import Any +from typing_extensions import Self from azure.core import PipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse +from . import models as _models from ._configuration import AzureBlobStorageConfiguration -from .operations import ServiceOperations -from .operations import ContainerOperations -from .operations import DirectoryOperations -from .operations import BlobOperations -from .operations import PageBlobOperations -from .operations import AppendBlobOperations -from .operations import BlockBlobOperations -from . import models +from ._serialization import Deserializer, Serializer +from .operations import ( + AppendBlobOperations, + BlobOperations, + BlockBlobOperations, + ContainerOperations, + PageBlobOperations, + ServiceOperations, +) -class AzureBlobStorage(object): +class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword """AzureBlobStorage. :ivar service: ServiceOperations operations :vartype service: azure.storage.blob.operations.ServiceOperations :ivar container: ContainerOperations operations :vartype container: azure.storage.blob.operations.ContainerOperations - :ivar directory: DirectoryOperations operations - :vartype directory: azure.storage.blob.operations.DirectoryOperations :ivar blob: BlobOperations operations :vartype blob: azure.storage.blob.operations.BlobOperations :ivar page_blob: PageBlobOperations operations @@ -43,49 +42,78 @@ class AzureBlobStorage(object): :vartype append_blob: azure.storage.blob.operations.AppendBlobOperations :ivar block_blob: BlockBlobOperations operations :vartype block_blob: azure.storage.blob.operations.BlockBlobOperations - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. :type url: str + :param base_url: Service URL. Required. Default value is "". + :type base_url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - base_url = '{url}' - self._config = AzureBlobStorageConfiguration(url, **kwargs) - self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, url: str, base_url: str = "", **kwargs: Any + ) -> None: + self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=base_url, policies=_policies, **kwargs) + + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - self.container = ContainerOperations( - self._client, self._config, self._serialize, self._deserialize) - self.directory = DirectoryOperations( - self._client, self._config, self._serialize, self._deserialize) - self.blob = BlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.page_blob = PageBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.append_blob = AppendBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.block_blob = BlockBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - - def close(self): - # type: () -> None + def close(self) -> None: self._client.close() - def __enter__(self): - # type: () -> AzureBlobStorage + def __enter__(self) -> Self: self._client.__enter__() return self - def __exit__(self, *exc_details): - # type: (Any) -> None + def __exit__(self, *exc_details: Any) -> None: self._client.__exit__(*exc_details) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_configuration.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_configuration.py index 6c37b2421150..80772c6e3aa2 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_configuration.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_configuration.py @@ -6,53 +6,46 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from typing import Any, Literal -from azure.core.configuration import Configuration from azure.core.pipeline import policies -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any - VERSION = "unknown" -class AzureBlobStorageConfiguration(Configuration): + +class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for AzureBlobStorage. Note that all parameters used to create this instance are saved as instance attributes. - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. :type url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None + def __init__(self, url: str, **kwargs: Any) -> None: + version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + if url is None: raise ValueError("Parameter 'url' must not be None.") - super(AzureBlobStorageConfiguration, self).__init__(**kwargs) self.url = url - self.version = "2020-04-08" - kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self.version = version + kwargs.setdefault("sdk_moniker", "azureblobstorage/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) - def _configure( - self, - **kwargs # type: Any - ): - # type: (...) -> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_patch.py new file mode 100644 index 000000000000..4688ca7f8ac2 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_patch.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md + + +def patch_sdk(): + pass diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_serialization.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_serialization.py new file mode 100644 index 000000000000..01a226bd7f14 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/_serialization.py @@ -0,0 +1,2115 @@ +# pylint: disable=too-many-lines +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+#
+# --------------------------------------------------------------------------
+
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+    Dict,
+    Any,
+    cast,
+    Optional,
+    Union,
+    AnyStr,
+    IO,
+    Mapping,
+    Callable,
+    TypeVar,
+    MutableMapping,
+    Type,
+    List,
+)
+
+try:
+    from urllib import quote  # type: ignore
+except ImportError:
+    from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate  # type: ignore
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+ModelType = TypeVar("ModelType", bound="Model")
+JSON = MutableMapping[str, Any]
+
+
+class RawDeserializer:
+
+    # Accept "text" because we're open minded people...
+    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+    # Name used in context
+    CONTEXT_NAME = "deserialized_data"
+
+    @classmethod
+    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
+        """Decode data according to content-type.
+
+        Accept a stream of data as well, but will be loaded at once in memory for now.
+
+        If no content-type, will return the string version (not bytes, not stream)
+
+        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+        :type data: str or bytes or IO
+        :param str content_type: The content type.
+        :return: The deserialized data.
+        :rtype: object
+        """
+        if hasattr(data, "read"):
+            # Assume a stream
+            data = cast(IO, data).read()
+
+        if isinstance(data, bytes):
+            data_as_str = data.decode(encoding="utf-8-sig")
+        else:
+            # Explain to mypy the correct type.
+            data_as_str = cast(str, data)
+
+            # Remove Byte Order Mark if present in string
+            data_as_str = data_as_str.lstrip(_BOM)
+
+        if content_type is None:
+            return data
+
+        if cls.JSON_REGEXP.match(content_type):
+            try:
+                return json.loads(data_as_str)
+            except ValueError as err:
+                raise DeserializationError("JSON is invalid: {}".format(err), err) from err
+        elif "xml" in (content_type or []):
+            try:
+
+                try:
+                    if isinstance(data, unicode):  # type: ignore
+                        # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
+                        data_as_str = data_as_str.encode(encoding="utf-8")  # type: ignore
+                except NameError:
+                    pass
+
+                return ET.fromstring(data_as_str)  # nosec
+            except ET.ParseError as err:
+                # It might be because the server has an issue, and returned JSON with
+                # content-type XML....
+                # So let's try a JSON load, and if it's still broken
+                # let's flow the initial exception
+                def _json_attempt(data):
+                    try:
+                        return True, json.loads(data)
+                    except ValueError:
+                        return False, None  # Don't care about this one
+
+                success, json_result = _json_attempt(data)
+                if success:
+                    return json_result
+                # If I'm here, it's not JSON, it's not XML, let's scream
+                # and raise the last context in this block (the XML exception)
+                # The function hack is because Py2.7 messes up with exception
+                # context otherwise.
+                _LOGGER.critical("Wasn't XML nor JSON, failing")
+                raise DeserializationError("XML is invalid") from err
+        elif content_type.startswith("text/"):
+            return data_as_str
+        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+    @classmethod
+    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+        """Deserialize from HTTP response.
+
+        Use bytes and headers to NOT use any requests/aiohttp or whatever
+        specific implementation.
+        Headers will be tested for "content-type"
+
+        :param bytes body_bytes: The body of the response.
+        :param dict headers: The headers of the response.
+        :returns: The deserialized data.
+        :rtype: object
+        """
+        # Try to use content-type from headers if available
+        content_type = None
+        if "content-type" in headers:
+            content_type = headers["content-type"].split(";")[0].strip().lower()
+        # Ouch, this server did not declare what it sent...
+        # Let's guess it's JSON...
+        # Also, since Autorest was considering that an empty body was a valid JSON,
+        # need that test as well....
+        else:
+            content_type = "application/json"
+
+        if body_bytes:
+            return cls.deserialize_from_text(body_bytes, content_type)
+        return None
+
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
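+
+# A short sketch of the content-type dispatch implemented above:
+#
+#     RawDeserializer.deserialize_from_text('{"a": 1}', "application/json")  # -> {'a': 1}
+#     RawDeserializer.deserialize_from_text("<e>1</e>", "application/xml")   # -> an ET.Element
+#     RawDeserializer.deserialize_from_text("hi", "text/plain")              # -> 'hi'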
+
+
+class UTC(datetime.tzinfo):
+    """Time Zone info for handling UTC"""
+
+    def utcoffset(self, dt):
+        """UTC offset for UTC is 0.
+
+        :param datetime.datetime dt: The datetime
+        :returns: The offset
+        :rtype: datetime.timedelta
+        """
+        return datetime.timedelta(0)
+
+    def tzname(self, dt):
+        """Timestamp representation.
+
+        :param datetime.datetime dt: The datetime
+        :returns: The timestamp representation
+        :rtype: str
+        """
+        return "Z"
+
+    def dst(self, dt):
+        """No daylight saving for UTC.
+
+        :param datetime.datetime dt: The datetime
+        :returns: The daylight saving time
+        :rtype: datetime.timedelta
+        """
+        return datetime.timedelta(hours=1)
+
+
+try:
+    from datetime import timezone as _FixedOffset  # type: ignore
+except ImportError:  # Python 2.7
+
+    class _FixedOffset(datetime.tzinfo):  # type: ignore
+        """Fixed offset in minutes east from UTC.
+        Copy/pasted from Python doc
+        :param datetime.timedelta offset: offset in timedelta format
+        """
+
+        def __init__(self, offset):
+            self.__offset = offset
+
+        def utcoffset(self, dt):
+            return self.__offset
+
+        def tzname(self, dt):
+            return str(self.__offset.total_seconds() / 3600)
+
+        def __repr__(self):
+            return "<FixedOffset {}>".format(self.tzname(None))
+
+        def dst(self, dt):
+            return datetime.timedelta(0)
+
+        def __getinitargs__(self):
+            return (self.__offset,)
+
+
+try:
+    from datetime import timezone
+
+    TZ_UTC = timezone.utc
+except ImportError:
+    TZ_UTC = UTC()  # type: ignore
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the Python attribute.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A key using attribute name
+    :rtype: str
+    """
+    return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the full RestAPI key path.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A list of keys using RestAPI syntax.
+    :rtype: list
+    """
+    keys = _FLATTEN.split(attr_desc["key"])
+    return ([_decode_attribute_map_key(k) for k in keys], value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+    """A key transformer that returns the last RestAPI key.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: The last RestAPI key.
+    :rtype: str
+    """
+    key, value = full_restapi_key_transformer(key, attr_desc, value)
+    return (key[-1], value)
+
+
+def _create_xml_node(tag, prefix=None, ns=None):
+    """Create a XML node.
+
+    :param str tag: The tag name
+    :param str prefix: The prefix
+    :param str ns: The namespace
+    :returns: The XML node
+    :rtype: xml.etree.ElementTree.Element
+    """
+    if prefix and ns:
+        ET.register_namespace(prefix, ns)
+    if ns:
+        return ET.Element("{" + ns + "}" + tag)
+    return ET.Element(tag)
+
+
+class Model(object):
+    """Mixin for all client request body/response body models to support
+    serialization and deserialization.
+    """
+
+    _subtype_map: Dict[str, Dict[str, Any]] = {}
+    _attribute_map: Dict[str, Dict[str, Any]] = {}
+    _validation: Dict[str, Dict[str, Any]] = {}
+
+    def __init__(self, **kwargs: Any) -> None:
+        self.additional_properties: Optional[Dict[str, Any]] = {}
+        for k in kwargs:  # pylint: disable=consider-using-dict-items
+            if k not in self._attribute_map:
+                _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+            elif k in self._validation and self._validation[k].get("readonly", False):
+                _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+            else:
+                setattr(self, k, kwargs[k])
+
+    def __eq__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+ + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). 
+            # Add ourselves as dependencies.
+            client_models = {cls.__name__: cls}
+        return client_models
+
+    @classmethod
+    def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType:
+        """Parse a str using the RestAPI syntax and return a model.
+
+        :param str data: A str using RestAPI structure. JSON by default.
+        :param str content_type: JSON by default, set application/xml if XML.
+        :returns: An instance of this model
+        :raises: DeserializationError if something went wrong
+        :rtype: ModelType
+        """
+        deserializer = Deserializer(cls._infer_class_models())
+        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
+    @classmethod
+    def from_dict(
+        cls: Type[ModelType],
+        data: Any,
+        key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+        content_type: Optional[str] = None,
+    ) -> ModelType:
+        """Parse a dict using the given key extractor and return a model.
+
+        By default, the key extractors used are rest_key_case_insensitive_extractor,
+        attribute_key_case_insensitive_extractor
+        and last_rest_key_case_insensitive_extractor.
+
+        :param dict data: A dict using RestAPI structure
+        :param function key_extractors: A key extractor function.
+        :param str content_type: JSON by default, set application/xml if XML.
+        :returns: An instance of this model
+        :raises: DeserializationError if something went wrong
+        :rtype: ModelType
+        """
+        deserializer = Deserializer(cls._infer_class_models())
+        deserializer.key_extractors = (  # type: ignore
+            [  # type: ignore
+                attribute_key_case_insensitive_extractor,
+                rest_key_case_insensitive_extractor,
+                last_rest_key_case_insensitive_extractor,
+            ]
+            if key_extractors is None
+            else key_extractors
+        )
+        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
+    @classmethod
+    def _flatten_subtype(cls, key, objects):
+        if "_subtype_map" not in cls.__dict__:
+            return {}
+        result = dict(cls._subtype_map[key])
+        for valuetype in cls._subtype_map[key].values():
+            result.update(objects[valuetype]._flatten_subtype(key, objects))  # pylint: disable=protected-access
+        return result
+
+    @classmethod
+    def _classify(cls, response, objects):
+        """Check the class _subtype_map for any child classes.
+        We want to ignore any inherited _subtype_maps.
+        Remove the polymorphic key from the initial data.
+
+        :param dict response: The initial data
+        :param dict objects: The class objects
+        :returns: The class to be used
+        :rtype: class
+        """
+        for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
+            subtype_value = None
+
+            if not isinstance(response, ET.Element):
+                rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
+                subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+            else:
+                subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
+            if subtype_value:
+                # Try to match the base class.
+                # Can be the class name only
+                # (bug to fix in Autorest to support x-ms-discriminator-name).
+                if cls.__name__ == subtype_value:
+                    return cls
+                flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
+                try:
+                    return objects[flatten_mapping_type[subtype_value]]  # type: ignore
+                except KeyError:
+                    _LOGGER.warning(
+                        "Subtype value %s has no mapping, use base class %s.",
+                        subtype_value,
+                        cls.__name__,
+                    )
+                    break
+            else:
+                _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
+                break
+        return cls
+
+    @classmethod
+    def _get_rest_key_parts(cls, attr_key):
+        """Get the RestAPI key of this attr, split it and decode each part.
+
+        :param str attr_key: Attribute key. Must be in attribute_map.
+        :returns: A list of RestAPI parts
+        :rtype: list
+        """
+        rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
+        return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
+
+
+def _decode_attribute_map_key(key):
+    """This decodes a key in an _attribute_map to the actual key we want to look at
+    inside the received data.
+
+    :param str key: A key string from the generated code
+    :returns: The decoded key
+    :rtype: str
+    """
+    return key.replace("\\.", ".")
+
+
+class Serializer(object):  # pylint: disable=too-many-public-methods
+    """Request object model serializer."""
+
+    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+    _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
+    days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
+    months = {
+        1: "Jan",
+        2: "Feb",
+        3: "Mar",
+        4: "Apr",
+        5: "May",
+        6: "Jun",
+        7: "Jul",
+        8: "Aug",
+        9: "Sep",
+        10: "Oct",
+        11: "Nov",
+        12: "Dec",
+    }
+    validation = {
+        "min_length": lambda x, y: len(x) < y,
+        "max_length": lambda x, y: len(x) > y,
+        "minimum": lambda x, y: x < y,
+        "maximum": lambda x, y: x > y,
+        "minimum_ex": lambda x, y: x <= y,
+        "maximum_ex": lambda x, y: x >= y,
+        "min_items": lambda x, y: len(x) < y,
+        "max_items": lambda x, y: len(x) > y,
+        "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
+        "unique": lambda x, y: len(x) != len(set(x)),
+        "multiple": lambda x, y: x % y != 0,
+    }
+
+    def __init__(self, classes: Optional[Mapping[str, type]] = None):
+        self.serialize_type = {
+            "iso-8601": Serializer.serialize_iso,
+            "rfc-1123": Serializer.serialize_rfc,
+            "unix-time": Serializer.serialize_unix,
+            "duration": Serializer.serialize_duration,
+            "date": Serializer.serialize_date,
+            "time": Serializer.serialize_time,
+            "decimal": Serializer.serialize_decimal,
+            "long": Serializer.serialize_long,
+            "bytearray": Serializer.serialize_bytearray,
+            "base64": Serializer.serialize_base64,
+            "object": self.serialize_object,
+            "[]": self.serialize_iter,
+            "{}": self.serialize_dict,
+        }
+        self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+        self.key_transformer = full_restapi_key_transformer
+        self.client_side_validation = True
+
+    def _serialize(  # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+        self, target_obj, data_type=None, **kwargs
+    ):
+        """Serialize data into a string according to type.
+
+        :param object target_obj: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :rtype: str, dict
+        :raises: SerializationError if serialization fails.
+        :returns: The serialized data.
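+
+        A minimal usage sketch (``MyModel`` and ``my_model`` are placeholders
+        for a generated model class and one of its instances):
+
+        .. code:: python
+
+            serializer = Serializer({"MyModel": MyModel})
+            body = serializer._serialize(my_model)  # JSON-ready dict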
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. + :returns: The serialized data. 
+        :rtype: str, int, float, bool, dict, list
+        """
+        if data is None:
+            raise ValueError("No value for given attribute")
+
+        try:
+            if data is CoreNull:
+                return None
+            if data_type in self.basic_types.values():
+                return self.serialize_basic(data, data_type, **kwargs)
+
+            if data_type in self.serialize_type:
+                return self.serialize_type[data_type](data, **kwargs)
+
+            # If dependencies is empty, try with current data class
+            # It has to be a subclass of Enum anyway
+            enum_type = self.dependencies.get(data_type, data.__class__)
+            if issubclass(enum_type, Enum):
+                return Serializer.serialize_enum(data, enum_obj=enum_type)
+
+            iter_type = data_type[0] + data_type[-1]
+            if iter_type in self.serialize_type:
+                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
+
+        except (ValueError, TypeError) as err:
+            msg = "Unable to serialize value: {!r} as type: {!r}."
+            raise SerializationError(msg.format(data, data_type)) from err
+        return self._serialize(data, **kwargs)
+
+    @classmethod
+    def _get_custom_serializers(cls, data_type, **kwargs):  # pylint: disable=inconsistent-return-statements
+        custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+        if custom_serializer:
+            return custom_serializer
+        if kwargs.get("is_xml", False):
+            return cls._xml_basic_types_serializers.get(data_type)
+
+    @classmethod
+    def serialize_basic(cls, data, data_type, **kwargs):
+        """Serialize a basic builtin data type.
+        Serializes objects to str, int, float or bool.
+
+        Possible kwargs:
+        - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+        - is_xml bool : If set, use xml_basic_types_serializers
+
+        :param obj data: Object to be serialized.
+        :param str data_type: Type of object in the iterable.
+        :rtype: str, int, float, bool
+        :return: serialized object
+        """
+        custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+        if custom_serializer:
+            return custom_serializer(data)
+        if data_type == "str":
+            return cls.serialize_unicode(data)
+        return eval(data_type)(data)  # nosec # pylint: disable=eval-used
+
+    @classmethod
+    def serialize_unicode(cls, data):
+        """Special handling for serializing unicode strings in Py2.
+        Encode to UTF-8 if unicode, otherwise handle as a str.
+
+        :param str data: Object to be serialized.
+        :rtype: str
+        :return: serialized object
+        """
+        try:  # If I received an enum, return its value
+            return data.value
+        except AttributeError:
+            pass
+
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                # Don't change it, JSON and XML ElementTree are totally able
+                # to serialize correctly u'' strings
+                return data
+        except NameError:
+            return str(data)
+        return str(data)
+
+    def serialize_iter(self, data, iter_type, div=None, **kwargs):
+        """Serialize iterable.
+
+        Supported kwargs:
+        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
+          serialization_ctxt['type'] should be the same as data_type.
+        - is_xml bool : If set, serialize as XML
+
+        :param list data: Object to be serialized.
+        :param str iter_type: Type of object in the iterable.
+        :param str div: If set, this str will be used to combine the elements
+            in the iterable into a combined string. Default is 'None'.
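+            For example, ``div=","`` turns ``[1, 2, 3]`` into ``"1,2,3"``.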
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+        :rtype: str
+        :return: serialized time
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_time(attr)
+        t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+        if attr.microsecond:
+            t += ".{:06}".format(attr.microsecond)
+        return t
+
+    @staticmethod
+    def serialize_duration(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize a TimeDelta object into an ISO-8601 formatted string.
+
+        :param TimeDelta attr: Object to be serialized.
+        :rtype: str
+        :return: serialized duration
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_duration(attr)
+        return isodate.duration_isoformat(attr)
+
+    @staticmethod
+    def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize a Datetime object into an RFC-1123 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: TypeError if format invalid.
+        :return: serialized rfc
+        """
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+        except AttributeError as exc:
+            raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+            Serializer.days[utc.tm_wday],
+            utc.tm_mday,
+            Serializer.months[utc.tm_mon],
+            utc.tm_year,
+            utc.tm_hour,
+            utc.tm_min,
+            utc.tm_sec,
+        )
+
+    @staticmethod
+    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize a Datetime object into an ISO-8601 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: SerializationError if format invalid.
+        :return: serialized iso
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_datetime(attr)
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+            if utc.tm_year > 9999 or utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+
+            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+            if microseconds:
+                microseconds = "." + microseconds
+            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+            )
+            return date + microseconds + "Z"
+        except (ValueError, OverflowError) as err:
+            msg = "Unable to serialize datetime object."
+            raise SerializationError(msg) from err
+        except AttributeError as err:
+            msg = "ISO-8601 object must be valid Datetime object."
+            raise TypeError(msg) from err
+
+    @staticmethod
+    def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
+        """Serialize a Datetime object into IntTime format.
+        This is represented as seconds.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: int
+        :raises: SerializationError if format invalid
+        :return: serialized unix
+        """
+        if isinstance(attr, int):
+            return attr
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            return int(calendar.timegm(attr.utctimetuple()))
+        except AttributeError as exc:
+            raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
+def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        # Need the cast, as for some reasons "split" is typed as list[str | Any]
+        dict_keys = cast(List[str], _FLATTEN.split(key))
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = working_data.get(working_key, data)
+        if working_data is None:
+            # If at any point while following the flattened JSON path we see None,
+            # it means that all properties below it are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    return working_data.get(key)
+
+
+def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
+    attr, attr_desc, data
+):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+        if working_data is None:
+            # If at any point while following the flattened JSON path we see None,
+            # it means that all properties below it are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    if working_data:
+        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    This is the case-insensitive version of "last_rest_key_extractor".
+
+    :param str attr: The attribute to extract
+    :param dict attr_desc: The attribute description
+    :param dict data: The data to extract from
+    :rtype: object
+    :returns: The extracted attribute
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+    return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+    found_key = None
+    lower_attr = attr.lower()
+    for key in data:
+        if lower_attr == key.lower():
+            found_key = key
+            break
+
+    return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+    """Given an internal type XML description, extract the correct XML name with namespace.
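+
+    For example, a type whose ``_xml_map`` is ``{"name": "Blob", "ns": "http://ns"}``
+    yields ``"{http://ns}Blob"``.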
+
+    :param dict internal_type: A model type
+    :returns: The XML name, namespace-qualified if a namespace is declared
+    :rtype: str
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument,too-many-return-statements
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenario where I take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If the internal type has a local name and it's not a list, I use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
+        if internal_type:  # Complex type, ignore itemsName and use the complex type name
+            items_name = _extract_name_from_internal_type(internal_type)
+        else:
+            items_name = xml_desc.get("itemsName", xml_name)
+        children = data.findall(items_name)
+
+    if len(children) == 0:
+        if is_iter_type:
+            if is_wrapped:
+                return None  # is_wrapped no node, we want None
+            return []  # not wrapped, assume empty list
+        return None  # Assume it's not there, maybe an optional node.
+
+    # If is_iter_type and not wrapped, return all found children
+    if is_iter_type:
+        if not is_wrapped:
+            return children
+        # Iter and wrapped, should have found one node only (the wrap one)
+        if len(children) != 1:
+            raise DeserializationError(
+                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(  # pylint: disable=line-too-long
+                    xml_name
+                )
+            )
+        return list(children[0])  # Might be empty list and that's ok.
+
+    # Here it's not an itertype; we should have found one element only, or none
+    if len(children) > 1:
+        raise DeserializationError("Found several XML '{}' nodes where only one was expected".format(xml_name))
+    return children[0]
+
+
+class Deserializer(object):
+    """Response object model deserializer.
+
+    :param dict classes: Class type dictionary for deserializing complex types.
+    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
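+
+    A minimal usage sketch (``MyModel`` is a placeholder for a generated model
+    class registered in the class map, and ``raw_response`` may be bytes, a
+    string, or a pipeline response):
+
+    .. code:: python
+
+        deserializer = Deserializer({"MyModel": MyModel})
+        model = deserializer("MyModel", raw_response, content_type="application/json")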
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+        :rtype: tuple
+        """
+        if target is None:
+            return None, None
+
+        if isinstance(target, str):
+            try:
+                target = self.dependencies[target]
+            except KeyError:
+                return target, target
+
+        try:
+            target = target._classify(data, self.dependencies)  # type: ignore # pylint: disable=protected-access
+        except AttributeError:
+            pass  # Target is not a Model, no classify
+        return target, target.__class__.__name__  # type: ignore
+
+    def failsafe_deserialize(self, target_obj, data, content_type=None):
+        """Ignores any errors encountered in deserialization,
+        and falls back to not deserializing the object. Recommended
+        for use in error deserialization, as we want to return the
+        HttpResponseError to users, and not have them deal with
+        a deserialization error.
+
+        :param str target_obj: The target object type to deserialize to.
+        :param str/dict data: The response data to deserialize.
+        :param str content_type: Swagger "produces" if available.
+        :return: Deserialized object.
+        :rtype: object
+        """
+        try:
+            return self(target_obj, data, content_type=content_type)
+        except:  # pylint: disable=bare-except
+            _LOGGER.debug(
+                "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+            )
+            return None
+
+    @staticmethod
+    def _unpack_content(raw_data, content_type=None):
+        """Extract the correct structure for deserialization.
+
+        If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+        If we can't, raise. Your pipeline should have a RawDeserializer.
+
+        If not a pipeline response, and raw_data is bytes or string, use content-type
+        to decode it. If there is no content-type, try JSON.
+
+        If raw_data is something else, bypass all logic and return it directly.
+
+        :param obj raw_data: Data to be processed.
+        :param str content_type: How to parse if raw_data is a string/bytes.
+        :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+        :raises UnicodeDecodeError: If bytes is not valid UTF-8
+        :rtype: object
+        :return: Unpacked content.
+        """
+        # Assume this is enough to detect a Pipeline Response without importing it
+        context = getattr(raw_data, "context", {})
+        if context:
+            if RawDeserializer.CONTEXT_NAME in context:
+                return context[RawDeserializer.CONTEXT_NAME]
+            raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+        # Assume this is enough to recognize universal_http.ClientResponse without importing it
+        if hasattr(raw_data, "body"):
+            return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
+
+        # Assume this is enough to recognize requests.Response without importing it.
+        if hasattr(raw_data, "_content_consumed"):
+            return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
+
+        if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
+            return RawDeserializer.deserialize_from_text(raw_data, content_type)  # type: ignore
+        return raw_data
+
+    def _instantiate_model(self, response, attrs, additional_properties=None):
+        """Instantiate a response model passing in deserialized args.
+
+        :param Response response: The response model class.
+        :param dict attrs: The deserialized response attributes.
+        :param dict additional_properties: Additional properties to be set.
+        :rtype: Response
+        :return: The instantiated response model.
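+
+        Readonly attributes are filtered out of the constructor call and set
+        directly on the instance afterwards.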
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access + ] + const = [ + k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. 
+        :rtype: dict
+        """
+        if isinstance(attr, list):
+            return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+        if isinstance(attr, ET.Element):
+            # Transform <Key>value</Key> into {"Key": "value"}
+            attr = {el.tag: el.text for el in attr}
+        return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+    def deserialize_object(self, attr, **kwargs):  # pylint: disable=too-many-return-statements
+        """Deserialize a generic object.
+        This will be handled as a dictionary.
+
+        :param dict attr: Dictionary to be deserialized.
+        :return: Deserialized object.
+        :rtype: dict
+        :raises: TypeError if non-builtin datatype encountered.
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):
+            # Do not recurse on XML, just return the tree as-is
+            return attr
+        if isinstance(attr, str):
+            return self.deserialize_basic(attr, "str")
+        obj_type = type(attr)
+        if obj_type in self.basic_types:
+            return self.deserialize_basic(attr, self.basic_types[obj_type])
+        if obj_type is _long_type:
+            return self.deserialize_long(attr)
+
+        if obj_type == dict:
+            deserialized = {}
+            for key, value in attr.items():
+                try:
+                    deserialized[key] = self.deserialize_object(value, **kwargs)
+                except ValueError:
+                    deserialized[key] = None
+            return deserialized
+
+        if obj_type == list:
+            deserialized = []
+            for obj in attr:
+                try:
+                    deserialized.append(self.deserialize_object(obj, **kwargs))
+                except ValueError:
+                    pass
+            return deserialized
+
+        error = "Cannot deserialize generic object with type: "
+        raise TypeError(error + str(obj_type))
+
+    def deserialize_basic(self, attr, data_type):  # pylint: disable=too-many-return-statements
+        """Deserialize basic builtin data type from string.
+        Will attempt to convert to str, int, float and bool.
+        This function also accepts '1', '0', 'true' and 'false' as
+        valid bool values.
+
+        :param str attr: response string to be deserialized.
+        :param str data_type: deserialization data type.
+        :return: Deserialized basic type.
+        :rtype: str, int, float or bool
+        :raises: TypeError if string format is not valid.
+        """
+        # If we're here, data is supposed to be a basic type.
+        # If it's still an XML node, take the text
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if not attr:
+            if data_type == "str":
+                # None or '', node is empty string.
+                return ""
+            # None or '', node with a strong type is None.
+            # Don't try to model "empty bool" or "empty int"
+            return None
+
+        if data_type == "bool":
+            if attr in [True, False, 1, 0]:
+                return bool(attr)
+            if isinstance(attr, str):
+                if attr.lower() in ["true", "1"]:
+                    return True
+                if attr.lower() in ["false", "0"]:
+                    return False
+            raise TypeError("Invalid boolean value: {}".format(attr))
+
+        if data_type == "str":
+            return self.deserialize_unicode(attr)
+        return eval(data_type)(attr)  # nosec # pylint: disable=eval-used
+
+    @staticmethod
+    def deserialize_unicode(data):
+        """Preserve unicode objects in Python 2, otherwise return data
+        as a string.
+
+        :param str data: response string to be deserialized.
+        :return: Deserialized string.
+        :rtype: str or unicode
+        """
+        # We might be here because we have an enum modeled as string,
+        # and we try to deserialize a partial dict with enum inside
+        if isinstance(data, Enum):
+            return data
+
+        # Consider this a real string
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                return data
+        except NameError:
+            return str(data)
+        return str(data)
+
+    @staticmethod
+    def deserialize_enum(data, enum_obj):
+        """Deserialize string into enum object.
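+
+        Matching is case-insensitive, and an integer is treated as an index
+        into the enum's member list.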
+
+        If the string is not a valid enum value it will be returned as-is
+        and a warning will be logged.
+
+        :param str data: Response string to be deserialized. If this value is
+            None or invalid it will be returned as-is.
+        :param Enum enum_obj: Enum object to deserialize to.
+        :return: Deserialized enum object.
+        :rtype: Enum
+        """
+        if isinstance(data, enum_obj) or data is None:
+            return data
+        if isinstance(data, Enum):
+            data = data.value
+        if isinstance(data, int):
+            # Workaround. We might consider removing it in the future.
+            try:
+                return list(enum_obj.__members__.values())[data]
+            except IndexError as exc:
+                error = "{!r} is not a valid index for enum {!r}"
+                raise DeserializationError(error.format(data, enum_obj)) from exc
+        try:
+            return enum_obj(str(data))
+        except ValueError:
+            for enum_value in enum_obj:
+                if enum_value.value.lower() == str(data).lower():
+                    return enum_value
+            # We don't fail anymore for an unknown value, we deserialize as a string
+            _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+            return Deserializer.deserialize_unicode(data)
+
+    @staticmethod
+    def deserialize_bytearray(attr):
+        """Deserialize string into bytearray.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized bytearray
+        :rtype: bytearray
+        :raises: TypeError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return bytearray(b64decode(attr))  # type: ignore
+
+    @staticmethod
+    def deserialize_base64(attr):
+        """Deserialize a base64-encoded (URL-safe) string into bytes.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized base64 bytes
+        :rtype: bytes
+        :raises: TypeError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        padding = "=" * (3 - (len(attr) + 3) % 4)  # type: ignore
+        attr = attr + padding  # type: ignore
+        encoded = attr.replace("-", "+").replace("_", "/")
+        return b64decode(encoded)
+
+    @staticmethod
+    def deserialize_decimal(attr):
+        """Deserialize string into Decimal object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized decimal
+        :raises: DeserializationError if string format invalid.
+        :rtype: decimal
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            return decimal.Decimal(str(attr))  # type: ignore
+        except decimal.DecimalException as err:
+            msg = "Invalid decimal {}".format(attr)
+            raise DeserializationError(msg) from err
+
+    @staticmethod
+    def deserialize_long(attr):
+        """Deserialize string into long (Py2) or int (Py3).
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized int
+        :rtype: long or int
+        :raises: ValueError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return _long_type(attr)  # type: ignore
+
+    @staticmethod
+    def deserialize_duration(attr):
+        """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized duration
+        :rtype: TimeDelta
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            duration = isodate.parse_duration(attr)
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize duration object."
+            raise DeserializationError(msg) from err
+        return duration
+
+    @staticmethod
+    def deserialize_date(attr):
+        """Deserialize ISO-8601 formatted string into Date object.
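+
+        For example, ``"2024-12-20"`` deserializes to ``datetime.date(2024, 12, 20)``.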
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized date
+        :rtype: Date
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Using None ensures this raises an exception.
+        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized time
+        :rtype: datetime.time
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Time must have only digits and separators. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized RFC datetime
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized ISO datetime
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()  # type: ignore
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Deserialize a unix timestamp (seconds since the epoch) into a Datetime object.
+
+        :param int attr: Object to be deserialized.
+        :return: Deserialized datetime
+        :rtype: Datetime
+        :raises: DeserializationError if format invalid
+        """
+        if isinstance(attr, ET.Element):
+            attr = int(attr.text)  # type: ignore
+        try:
+            attr = int(attr)
+            date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to unix datetime object."
+ raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/__init__.py index 12cfcf636c47..62dc43a7722a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/__init__.py @@ -7,4 +7,17 @@ # -------------------------------------------------------------------------- from ._azure_blob_storage import AzureBlobStorage -__all__ = ['AzureBlobStorage'] + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureBlobStorage", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py index b53703478e99..9a06e367a4d2 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_azure_blob_storage.py @@ -6,31 +6,34 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any +from copy import deepcopy +from typing import Any, Awaitable +from typing_extensions import Self from azure.core import AsyncPipelineClient -from msrest import Deserializer, Serializer +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest +from .. import models as _models +from .._serialization import Deserializer, Serializer from ._configuration import AzureBlobStorageConfiguration -from .operations import ServiceOperations -from .operations import ContainerOperations -from .operations import DirectoryOperations -from .operations import BlobOperations -from .operations import PageBlobOperations -from .operations import AppendBlobOperations -from .operations import BlockBlobOperations -from .. import models +from .operations import ( + AppendBlobOperations, + BlobOperations, + BlockBlobOperations, + ContainerOperations, + PageBlobOperations, + ServiceOperations, +) -class AzureBlobStorage(object): +class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword """AzureBlobStorage. 
:ivar service: ServiceOperations operations :vartype service: azure.storage.blob.aio.operations.ServiceOperations :ivar container: ContainerOperations operations :vartype container: azure.storage.blob.aio.operations.ContainerOperations - :ivar directory: DirectoryOperations operations - :vartype directory: azure.storage.blob.aio.operations.DirectoryOperations :ivar blob: BlobOperations operations :vartype blob: azure.storage.blob.aio.operations.BlobOperations :ivar page_blob: PageBlobOperations operations @@ -39,45 +42,80 @@ class AzureBlobStorage(object): :vartype append_blob: azure.storage.blob.aio.operations.AppendBlobOperations :ivar block_blob: BlockBlobOperations operations :vartype block_blob: azure.storage.blob.aio.operations.BlockBlobOperations - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. :type url: str + :param base_url: Service URL. Required. Default value is "". + :type base_url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url: str, - **kwargs: Any + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, url: str, base_url: str = "", **kwargs: Any ) -> None: - base_url = '{url}' - self._config = AzureBlobStorageConfiguration(url, **kwargs) - self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs) + self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=base_url, policies=_policies, **kwargs) - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) + + def _send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: 
+ """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - self.container = ContainerOperations( - self._client, self._config, self._serialize, self._deserialize) - self.directory = DirectoryOperations( - self._client, self._config, self._serialize, self._deserialize) - self.blob = BlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.page_blob = PageBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.append_blob = AppendBlobOperations( - self._client, self._config, self._serialize, self._deserialize) - self.block_blob = BlockBlobOperations( - self._client, self._config, self._serialize, self._deserialize) + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureBlobStorage": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self - async def __aexit__(self, *exc_details) -> None: + async def __aexit__(self, *exc_details: Any) -> None: await self._client.__aexit__(*exc_details) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_configuration.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_configuration.py index 5727357d92f7..5128a4f98b2a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_configuration.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_configuration.py @@ -6,47 +6,46 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any +from typing import Any, Literal -from azure.core.configuration import Configuration from azure.core.pipeline import policies VERSION = "unknown" -class AzureBlobStorageConfiguration(Configuration): + +class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for AzureBlobStorage. Note that all parameters used to create this instance are saved as instance attributes. - :param url: The URL of the service account, container, or blob that is the targe of the desired operation. + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. Required. 
:type url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str """ - def __init__( - self, - url: str, - **kwargs: Any - ) -> None: + def __init__(self, url: str, **kwargs: Any) -> None: + version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + if url is None: raise ValueError("Parameter 'url' must not be None.") - super(AzureBlobStorageConfiguration, self).__init__(**kwargs) self.url = url - self.version = "2020-04-08" - kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self.version = version + kwargs.setdefault("sdk_moniker", "azureblobstorage/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_patch.py new file mode 100644 index 000000000000..4688ca7f8ac2 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/_patch.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
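The _patch.py module introduced below is the designated hook for handwritten extensions to the generated code (see the how-to-patch-sdk-code.md link in the file itself). A hypothetical customization following the pattern already wired into the generated __init__.py above; ExtraHelper is an invented name, not part of this patch:

from typing import List

class ExtraHelper:
    """Handwritten type re-exported alongside the generated surface."""

__all__: List[str] = ["ExtraHelper"]  # names here are picked up by the generated __init__.py

def patch_sdk():
    """Called once at import time; wrap or extend generated classes here if needed."""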
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md + + +def patch_sdk(): + pass diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/__init__.py index 62f85c9290c1..1be05c7aa9a7 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/__init__.py @@ -8,18 +8,22 @@ from ._service_operations import ServiceOperations from ._container_operations import ContainerOperations -from ._directory_operations import DirectoryOperations from ._blob_operations import BlobOperations from ._page_blob_operations import PageBlobOperations from ._append_blob_operations import AppendBlobOperations from ._block_blob_operations import BlockBlobOperations +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'ServiceOperations', - 'ContainerOperations', - 'DirectoryOperations', - 'BlobOperations', - 'PageBlobOperations', - 'AppendBlobOperations', - 'BlockBlobOperations', + "ServiceOperations", + "ContainerOperations", + "BlobOperations", + "PageBlobOperations", + "AppendBlobOperations", + "BlockBlobOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py index 333cb9f0e5aa..e466bdaba168 100644 --- 
a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_append_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,62 +7,84 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._append_blob_operations import ( + build_append_block_from_url_request, + build_append_block_request, + build_create_request, + build_seal_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class AppendBlobOperations: - """AppendBlobOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class AppendBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`append_blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def create( + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements self, content_length: int, timeout: Optional[int] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Create Append Blob operation creates a new append blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -69,38 +92,51 @@ async def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -110,6 +146,7 @@ async def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -117,361 +154,371 @@ async def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = 
blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "AppendBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = 
self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - 
response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def append_block( + @distributed_trace_async + async def append_block( # pylint: disable=inconsistent-return-statements self, content_length: int, - body: IO, + body: IO[bytes], timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, - encryption_algorithm: Optional[str] = "AES256", + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: 
Optional[_models.AppendPositionAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param cpk_info: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. 
:type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _max_size = None _append_position = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "appendblock" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.append_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - 
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_append_block_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + 
if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", 
response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - append_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def append_block_from_url( + @distributed_trace_async + async def append_block_from_url( # pylint: disable=inconsistent-return-statements self, source_url: str, content_length: int, source_range: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, - source_contentcrc64: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - encryption_algorithm: Optional[str] = "AES256", + transactional_content_md5: Optional[bytes] = None, request_id_parameter: Optional[str] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob where the contents are read from a source url. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param source_url: Specify a URL to the copy source. + :param source_url: Specify a URL to the copy source. Required. :type source_url: str - :param content_length: The length of the request. - :type content_length: long - :param source_range: Bytes of source data in the specified range. + :param content_length: The length of the request. Required. + :type content_length: int + :param source_range: Bytes of source data in the specified range. Default value is None. :type source_range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. Default value is None. 
+ :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + by the service. Default value is None. + :type transactional_content_md5: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param modified_access_conditions: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
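One pattern worth spelling out for append_block_from_url: the append-position precondition makes a copy-append conditional and bounded. A hedged sketch; the models import path and the SAS-style source URL are illustrative:

from azure.storage.blob._generated import models as _models  # illustrative import path

conditions = _models.AppendPositionAccessConditions(
    append_position=0,         # the service returns 412 unless the blob is still empty
    max_size=4 * 1024 * 1024,  # reject an append that would grow the blob past 4 MiB
)
# Inside an async context with a client as in the earlier sketches:
# await client.append_blob.append_block_from_url(
#     source_url="https://source.blob.core.windows.net/src/blob?<sas>",  # placeholder
#     content_length=0,  # copied bytes come from source_url, not from the request body
#     source_range="bytes=0-1023",
#     append_position_access_conditions=conditions,
# )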
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _max_size = None @@ -485,225 +532,207 @@ async def append_block_from_url( _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None - if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size - _append_position = append_position_access_conditions.append_position if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "appendblock" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_append_block_from_url_request( + url=self._config.url, + source_url=source_url, + content_length=content_length, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + 
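+            # (Request-builder arguments continue below.) Every flattened value
+            # unpacked from the optional parameter-group models above is forwarded
+            # to build_append_block_from_url_request, which now assembles the query
+            # string and x-ms-* headers that the deleted inline code built by hand.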
encryption_scope=_encryption_scope, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.append_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if 
_if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
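The regenerated operations all share the status-handling pattern visible in this hunk: well-known codes are mapped to typed azure-core exceptions, and the error body is deserialized defensively. A minimal sketch of that pattern; `raise_for_status` and its parameters are hypothetical names, not part of this patch:

```python
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    map_error,
)

# Hypothetical helper mirroring the inline pattern in the generated methods.
def raise_for_status(response, pipeline_response, deserializer, models, expected=(201,)):
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    if response.status_code not in expected:
        # Raise a typed exception for the mapped status codes ...
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        # ... otherwise fall back to HttpResponseError, attaching the StorageError
        # model when the body parses (failsafe_deserialize does not raise on bad bodies).
        error = deserializer.failsafe_deserialize(models.StorageError, pipeline_response)
        raise HttpResponseError(response=response, model=error)
```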
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - append_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def seal( + @distributed_trace_async + async def seal( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version 2019-12-12 version or later. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param append_position_access_conditions: Parameter group. 
- :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _append_position = None - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match - comp = "seal" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + + _request = build_seal_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + append_position=_append_position, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.seal.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = 
self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) if cls: - return cls(pipeline_response, None, response_headers) - - seal.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py index 687bcd30dad6..94f1b103bd71 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,40 +7,80 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, AsyncIterator, Callable, Dict, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._blob_operations import ( + build_abort_copy_from_url_request, + build_acquire_lease_request, + build_break_lease_request, + build_change_lease_request, + build_copy_from_url_request, + build_create_snapshot_request, + build_delete_immutability_policy_request, + build_delete_request, + build_download_request, + build_get_account_info_request, + build_get_properties_request, + build_get_tags_request, + build_query_request, + build_release_lease_request, + build_renew_lease_request, + build_set_expiry_request, + build_set_http_headers_request, + build_set_immutability_policy_request, + build_set_legal_hold_request, + build_set_metadata_request, + build_set_tags_request, + build_set_tier_request, + build_start_copy_from_url_request, + build_undelete_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class BlobOperations: - """BlobOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class BlobOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. 
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`blob` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace_async async def download( self, snapshot: Optional[str] = None, @@ -48,469 +89,596 @@ async def download( range: Optional[str] = None, range_get_content_md5: Optional[bool] = None, range_get_content_crc64: Optional[bool] = None, - encryption_algorithm: Optional[str] = "AES256", + structured_body_type: Optional[str] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> IO: + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long """The Download operation reads or downloads a blob from the system, including its metadata and properties. You can also call Download to read a snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param range_get_content_md5: When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB - in size. + in size. Default value is None. :type range_get_content_md5: bool :param range_get_content_crc64: When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 - MB in size. + MB in size. Default value is None. :type range_get_content_crc64: bool - :param encryption_algorithm: The algorithm used to produce the encryption key hash. 
Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + :param structured_body_type: Specifies the response content should be returned as a structured + message and specifies the message schema version and properties. Default value is None. + :type structured_body_type: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_download_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + range=range, + lease_id=_lease_id, + range_get_content_md5=range_get_content_md5, + 
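+            # Note: encryption_algorithm is no longer a public keyword with an
+            # "AES256" default; it is now read from the CpkInfo parameter group
+            # (unpacked above) and forwarded with the other customer-provided-key
+            # values among the remaining request-builder arguments below.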
range_get_content_crc64=range_get_content_crc64, + structured_body_type=structured_body_type, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.download.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if range_get_content_md5 is not None: - header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("range_get_content_md5", range_get_content_md5, 'bool') - if range_get_content_crc64 is not None: - header_parameters['x-ms-range-get-content-crc64'] = self._serialize.header("range_get_content_crc64", range_get_content_crc64, 'bool') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} if response.status_code == 200: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', 
response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + 
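Alongside the richer header surface in this hunk, the download rewrite changes the calling contract: the pipeline now runs with stream=True and the method returns an async byte iterator rather than a raw IO object. A hedged usage sketch; `blob_op` and `read_whole_blob` are illustrative names, not part of this patch:

```python
async def read_whole_blob(blob_op) -> bytes:
    # The operation returns AsyncIterator[bytes]; the transport decompresses
    # the body by default (decompress=True) before chunks reach the caller.
    stream = await blob_op.download()
    chunks = []
    async for chunk in stream:
        chunks.append(chunk)
    return b"".join(chunks)
```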
response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - 
response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', 
response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
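+            # The 206 (Partial Content) branch repeats the 200 header set and adds
+            # the range-specific x-ms-content-crc64 value; like the 200 branch, it
+            # now also surfaces the immutability-policy, legal-hold, and
+            # structured-body headers introduced by newer service versions.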
response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - download.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - async def get_properties( + @distributed_trace_async + async def get_properties( # pylint: disable=inconsistent-return-statements self, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. 
:param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_properties_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', 
minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', 
response.headers.get('x-ms-copy-status')) - response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy')) - response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier')) - response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred')) - response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status')) - response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', 
response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-incremental-copy"] = self._deserialize( + "bool", response.headers.get("x-ms-incremental-copy") + ) + response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( + "str", response.headers.get("x-ms-copy-destination-snapshot") + ) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) + response_headers["x-ms-access-tier-inferred"] = self._deserialize( + "bool", response.headers.get("x-ms-access-tier-inferred") + ) + response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) + response_headers["x-ms-access-tier-change-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-access-tier-change-time") + ) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-rehydrate-priority"] = self._deserialize( + "str", response.headers.get("x-ms-rehydrate-priority") + ) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - get_properties.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def delete( + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements self, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, - delete_snapshots: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, request_id_parameter: Optional[str] = None, - blob_delete_type: Optional[str] = "Permanent", - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + blob_delete_type: Literal["Permanent"] = "Permanent", + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long 
"""If the storage account's soft delete feature is disabled then, when a blob is deleted, it is permanently removed from the storage account. If the storage account's soft delete feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible immediately. However, the blob service retains the blob or snapshot for the number of days - specified by the DeleteRetentionPolicy section of [Storage service properties] (Set-Blob- - Service-Properties.md). After the specified number of days has passed, the blob's data is - permanently removed from the storage account. Note that you continue to be charged for the - soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify - the "include=deleted" query parameter to discover which blobs and snapshots have been soft - deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the "include=deleted" query parameter to discover which blobs and snapshots have been + soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 (ResourceNotFound). :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the following two options: include: Delete the base blob and all of its snapshots. only: Delete - only the blob's snapshots and not the blob itself. + only the blob's snapshots and not the blob itself. Known values are: "include" and "only". + Default value is None. :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to - permanently delete a blob if blob soft delete is enabled. + permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and + None. Default value is "Permanent". :type blob_delete_type: str - :param lease_access_conditions: Parameter group. 
+ :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -520,793 +688,634 @@ async def delete( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_delete_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + delete_snapshots=delete_snapshots, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_delete_type=blob_delete_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if blob_delete_type is not None: - query_parameters['deletetype'] = self._serialize.query("blob_delete_type", blob_delete_type, 'str') - - # 
Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if delete_snapshots is not None: - header_parameters['x-ms-delete-snapshots'] = self._serialize.header("delete_snapshots", delete_snapshots, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_access_control( - self, - timeout: Optional[int] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - posix_permissions: Optional[str] = None, - posix_acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + @distributed_trace_async + async def undelete( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, 
request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - """Set the owner, group, permissions, or access control list for a blob. + # pylint: disable=line-too-long + """Undelete a blob that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param owner: Optional. The owner of the blob or directory. - :type owner: str - :param group: Optional. The owning group of the blob or directory. - :type group: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". - :type posix_acl: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "setAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_undelete_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if owner is not None: - header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str') - if group is not None: - header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_acl is not None: - header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.patch(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def get_access_control( + @distributed_trace_async + async def set_expiry( # pylint: disable=inconsistent-return-statements self, + expiry_options: Union[str, _models.BlobExpiryOptions], timeout: Optional[int] = None, - upn: Optional[bool] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + expires_on: Optional[str] = None, + **kwargs: Any ) -> None: - """Get the owner, group, permissions, or access control list for a blob. + # pylint: disable=line-too-long + """Sets the time a blob will expire and be deleted. + :param expiry_options: Required. Indicates mode of the expiry time. Known values are: + "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. + :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. 
+ :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If - "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. - :type upn: bool :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param expires_on: The time to set the blob to expiry. Default value is None. + :type expires_on: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "getAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_expiry_request( + url=self._config.url, + expiry_options=expiry_options, + timeout=timeout, + request_id_parameter=request_id_parameter, + expires_on=expires_on, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = 
self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if upn is not None: - query_parameters['upn'] = self._serialize.query("upn", upn, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner')) - response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group')) - response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions')) - response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - get_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def rename( + @distributed_trace_async + async def set_http_headers( # pylint: disable=inconsistent-return-statements self, - rename_source: str, timeout: Optional[int] = None, - path_rename_mode: Optional[Union[str, "_models.PathRenameMode"]] = None, - directory_properties: Optional[str] = None, - posix_permissions: Optional[str] = None, - posix_umask: Optional[str] = None, - source_lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, - directory_http_headers: Optional["_models.DirectoryHttpHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: - """Rename a blob/file. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param rename_source: The file or directory to be renamed. The value must have the following - format: "/{filesysystem}/{path}". If "x-ms-properties" is specified, the properties will - overwrite the existing properties; otherwise, the existing properties will be preserved. - :type rename_source: str + # pylint: disable=line-too-long + """The Set HTTP Headers operation sets system properties on the blob. + :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param path_rename_mode: Determines the behavior of the rename operation. - :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. 
Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. - :type posix_umask: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. - :type source_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param directory_http_headers: Parameter group. - :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_md5 = None + _blob_content_encoding = None + _blob_content_language = None _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding = directory_http_headers.content_encoding - _content_language = directory_http_headers.content_language - _content_disposition = directory_http_headers.content_disposition + _if_tags = None + _blob_content_disposition = None + if 
blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_type = blob_http_headers.blob_content_type if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - accept = "application/xml" + _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_http_headers_request( + url=self._config.url, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_md5=_blob_content_md5, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_content_disposition=_blob_content_disposition, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.rename.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if path_rename_mode is not None: - query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str') - if directory_properties is not None: - header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_umask is not None: - header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') - if _cache_control is not None: - header_parameters['x-ms-cache-control'] = 
self._serialize.header("cache_control", _cache_control, 'str') - if _content_type is not None: - header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str') - if _content_encoding is not None: - header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') - if _content_language is not None: - header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') - if _content_disposition is not None: - header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if source_lease_id is not None: - header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [201]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', 
response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - rename.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - async def undelete( + @distributed_trace_async + async def set_immutability_policy( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: - """Undelete a blob that was previously soft deleted. + # pylint: disable=line-too-long + """The Set Immutability Policy operation sets the immutability policy on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. 
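# Example: exercising the rewritten set_http_headers operation above. A
# minimal usage sketch only: the vendored module path, direct construction of
# the generated AzureBlobStorage client, and the close() call are assumptions
# (checkpoint-store code normally reaches this layer through the public
# BlobClient wrapper), and authentication is omitted for brevity.
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated.aio import AzureBlobStorage
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models as _models

async def set_http_headers_example(blob_url: str) -> None:
    # blob_url addresses one blob, e.g. "https://<account>.blob.core.windows.net/<container>/<blob>".
    client = AzureBlobStorage(url=blob_url)
    try:
        # The operation now routes through build_set_http_headers_request and
        # expects HTTP 200, per the hunk above; parameter groups carry the
        # header values and preconditions.
        await client.blob.set_http_headers(
            blob_http_headers=_models.BlobHTTPHeaders(blob_content_type="application/json"),
            modified_access_conditions=_models.ModifiedAccessConditions(if_match='"<etag>"'),
        )
    finally:
        await client.close()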
+ :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.undelete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + if_unmodified_since=_if_unmodified_since, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = 
self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) if cls: - return cls(pipeline_response, None, response_headers) - - undelete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_expiry( + @distributed_trace_async + async def delete_immutability_policy( # pylint: disable=inconsistent-return-statements self, - expiry_options: Union[str, "_models.BlobExpiryOptions"], timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any ) -> None: - """Sets the time a blob will expire and be deleted. + # pylint: disable=line-too-long + """The Delete Immutability Policy operation deletes the immutability policy on the blob. - :param expiry_options: Required. Indicates mode of the expiry time. - :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param expires_on: The time to set the blob to expiry. - :type expires_on: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. 
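# Example: calling the new set_immutability_policy operation above. A minimal
# sketch under the same assumptions as the previous example (assumed vendored
# module path, direct generated-client construction, auth omitted).
import datetime
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated.aio import AzureBlobStorage

async def set_immutability_policy_example(blob_url: str) -> None:
    client = AzureBlobStorage(url=blob_url)
    try:
        expiry = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=7)
        # "Unlocked" policies can later be shortened or deleted; "Locked" ones
        # cannot (modes per the docstring: "Mutable", "Unlocked", "Locked").
        await client.blob.set_immutability_policy(
            immutability_policy_expiry=expiry,
            immutability_policy_mode="Unlocked",
        )
    finally:
        await client.close()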
+ :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "expiry" - accept = "application/xml" - - # Construct URL - url = self.set_expiry.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') - if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_delete_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_expiry.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_http_headers( + @distributed_trace_async + async def set_legal_hold( # pylint: disable=inconsistent-return-statements self, + legal_hold: bool, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any ) -> None: - """The Set HTTP Headers operation sets system properties on the blob. + # pylint: disable=line-too-long + """The Set Legal Hold operation sets a legal hold on the blob. + :param legal_hold: Specified if a legal hold should be set on the blob. Required. + :type legal_hold: bool :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_http_headers: Parameter group. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. 
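# Example: the matching delete_immutability_policy call shown above, which
# clears an unlocked policy. Same assumptions as the earlier sketches.
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated.aio import AzureBlobStorage

async def delete_immutability_policy_example(blob_url: str) -> None:
    client = AzureBlobStorage(url=blob_url)
    try:
        # snapshot and version_id (service version 2019-10-10 and newer)
        # default to None, so this targets the base blob.
        await client.blob.delete_immutability_policy()
    finally:
        await client.close()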
+ :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_md5 = None - _blob_content_encoding = None - _blob_content_language = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _blob_content_disposition = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_disposition = blob_http_headers.blob_content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_legal_hold_request( + url=self._config.url, + legal_hold=legal_hold, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_http_headers.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = 
self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_http_headers.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def set_metadata( + @distributed_trace_async + async def set_metadata( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more name-value pairs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -1314,681 +1323,663 @@ async def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "metadata" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_metadata_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if 
timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', 
response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - set_metadata.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def acquire_lease( + @distributed_trace_async + async def acquire_lease( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, duration: Optional[int] = None, proposed_lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. + duration cannot be changed using renew or change. Default value is None. :type duration: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Default value is None. :type proposed_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
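# Example: the reworked set_metadata operation above. Note the typing change
# in this hunk: metadata is now Dict[str, str] rather than a pre-serialized
# string, and encryption_algorithm moved into the CpkInfo parameter group.
# Same assumptions as the earlier sketches.
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated.aio import AzureBlobStorage

async def set_metadata_example(blob_url: str) -> None:
    client = AzureBlobStorage(url=blob_url)
    try:
        await client.blob.set_metadata(metadata={"owner": "checkpointstore", "epoch": "5"})
    finally:
        await client.close()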
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "acquire" - accept = "application/xml" - - # Construct URL - url = self.acquire_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if duration is not None: - header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') - if proposed_lease_id is not None: - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = 
self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def release_lease( + @distributed_trace_async + async def release_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = 
None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: 
Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def renew_lease( + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 
'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_renew_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - renew_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def change_lease( + @distributed_trace_async + async def change_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, proposed_lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and 
delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Required. :type proposed_lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "change" - accept = "application/xml" - - # Construct URL - url = self.change_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = 
self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_change_lease_request( + url=self._config.url, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", 
response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - change_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def break_lease( + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, break_period: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a fixed- - duration lease breaks after the remaining lease period elapses, and an infinite lease breaks - immediately. + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. :type break_period: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "break" - accept = "application/xml" - - # Construct URL - url = self.break_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if break_period is not None: - header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = 
self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_break_lease_request( + url=self._config.url, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - break_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def create_snapshot( + @distributed_trace_async + async def create_snapshot( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: 
Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Create Snapshot operation creates a read-only snapshot of a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -1996,36 +1987,41 @@ async def create_snapshot( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -2034,113 +2030,103 @@ async def create_snapshot( _if_tags = None _lease_id = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "snapshot" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_create_snapshot_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create_snapshot.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 
'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-request-server-encrypted')) + response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - create_snapshot.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def start_copy_from_url( + @distributed_trace_async + async def start_copy_from_url( # pylint: disable=inconsistent-return-statements self, copy_source: str, timeout: Optional[int] = None, - metadata: Optional[str] = None, - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, - rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, seal_blob: Optional[bool] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Start Copy From URL operation copies a blob or an internet resource to a new blob. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. 
If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2148,38 +2134,57 @@ async def start_copy_from_url( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. + blob. Known values are: "High" and "Standard". Default value is None. :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str :param seal_blob: Overrides the sealed state of the destination blob. Service version - 2019-12-12 and newer. + 2019-12-12 and newer. Default value is None. :type seal_blob: bool - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
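A short sketch of the asynchronous copy that start_copy_from_url initiates, via the standalone azure.storage.blob.aio public client (illustrative only; the source URL, names, and connection string are placeholders, and a pending copy must be polled for completion):

import asyncio

from azure.storage.blob.aio import BlobClient

async def copy_demo(conn_str: str, source_url: str) -> None:
    dest = BlobClient.from_connection_string(conn_str, "mycontainer", "copied-blob")
    async with dest:
        # Starts a server-side copy; the service answers 202 before the copy finishes
        result = await dest.start_copy_from_url(source_url, metadata={"origin": "demo"})
        print(result["copy_id"], result["copy_status"])  # from x-ms-copy-id / x-ms-copy-status

asyncio.run(copy_demo("<connection string>", "https://<account>.blob.core.windows.net/src/blob?<sas>"))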
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _source_if_modified_since = None _source_if_unmodified_since = None _source_if_match = None @@ -2191,124 +2196,113 @@ async def start_copy_from_url( _if_none_match = None _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if modified_access_conditions is not None: + _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_start_copy_from_url_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + rehydrate_priority=rehydrate_priority, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + seal_blob=seal_blob, + immutability_policy_expiry=immutability_policy_expiry, + 
immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.start_copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _source_if_tags is not None: - header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - if seal_blob is not None: - header_parameters['x-ms-seal-blob'] = 
self._serialize.header("seal_blob", seal_blob, 'bool') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) if cls: - return cls(pipeline_response, None, response_headers) - - start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def copy_from_url( + @distributed_trace_async + async def copy_from_url( # pylint: disable=inconsistent-return-statements self, copy_source: str, timeout: Optional[int] = None, - metadata: Optional[str] = None, - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, blob_tags_string: Optional[str] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: 
Optional["_models.LeaseAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return a response until the copy is complete. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2316,35 +2310,66 @@ async def copy_from_url( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + from the copy source. Default value is None. + :type source_content_md5: bytes + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. 
+ :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and + "COPY". Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param cpk_scope_info: Parameter group. Default value is None. 
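Unlike start_copy_from_url, this operation pins x-ms-requires-sync: true, so the service does not respond until the copy completes. A hedged sketch through the standalone public client, where (as an assumption about the public surface, not something this patch states) requires_sync=True selects this code path; placeholders throughout:

import asyncio

from azure.storage.blob.aio import BlobClient

async def sync_copy_demo(conn_str: str, source_url: str) -> None:
    dest = BlobClient.from_connection_string(conn_str, "mycontainer", "synccopy-blob")
    async with dest:
        # requires_sync=True routes to the synchronous copy_from_url operation
        result = await dest.start_copy_from_url(source_url, requires_sync=True)
        print(result["copy_status"])  # "success" once the synchronous copy has finished

asyncio.run(sync_copy_demo("<connection string>", "https://<account>.blob.core.windows.net/src/blob?<sas>"))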
+ :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + x_ms_requires_sync: Literal["true"] = kwargs.pop( + "x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + _source_if_modified_since = None _source_if_unmodified_since = None _source_if_match = None @@ -2355,781 +2380,829 @@ async def copy_from_url( _if_none_match = None _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id + _encryption_scope = None + if source_modified_access_conditions is not None: + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - x_ms_requires_sync = "true" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + + _request = build_copy_from_url_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + 
immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + copy_source_authorization=copy_source_authorization, + encryption_scope=_encryption_scope, + copy_source_tags=copy_source_tags, + x_ms_requires_sync=x_ms_requires_sync, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-requires-sync'] = self._serialize.header("x_ms_requires_sync", x_ms_requires_sync, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = 
self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def abort_copy_from_url( + @distributed_trace_async + async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements self, copy_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - 
lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination blob with zero length and full metadata. :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy - Blob operation. + Blob operation. Required. :type copy_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) + copy_action_abort_constant: Literal["abort"] = kwargs.pop( + "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - comp = "copy" - copy_action_abort_constant = "abort" - accept = "application/xml" - - # Construct URL - url = self.abort_copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['copyid'] = self._serialize.query("copy_id", copy_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-action'] = self._serialize.header("copy_action_abort_constant", copy_action_abort_constant, 'str') - if _lease_id is not None: - 
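
Both copy_from_url above and abort_copy_from_url here swap the old inline header_parameters/query_parameters assembly (the removed lines around this point) for a module-level request builder. A minimal sketch of that idiom, assuming only azure.core.rest.HttpRequest; the _sketch suffix marks it as illustrative rather than the vendored builder:

    from azure.core.rest import HttpRequest

    def build_abort_copy_from_url_request_sketch(url: str, copy_id: str, **kwargs) -> HttpRequest:
        # Operation constants travel as overridable keyword arguments now,
        # instead of being hard-coded locals inside the coroutine.
        comp: str = kwargs.pop("comp", "copy")
        copy_action: str = kwargs.pop("copy_action_abort_constant", "abort")
        params = {"comp": comp, "copyid": copy_id}
        headers = {"x-ms-copy-action": copy_action, "Accept": "application/xml"}
        return HttpRequest(method="PUT", url=url, params=params, headers=headers)
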
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_abort_copy_from_url_request( + url=self._config.url, + copy_id=copy_id, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + copy_action_abort_constant=copy_action_abort_constant, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def set_tier( + @distributed_trace_async + async def set_tier( # pylint: disable=inconsistent-return-statements self, - tier: Union[str, "_models.AccessTierRequired"], + tier: Union[str, _models.AccessTierRequired], snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, - rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Set Tier operation sets the tier on a 
blob. The operation is allowed on a page blob in a premium storage account and on a block blob in a blob storage account (locally redundant storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. - :param tier: Indicates the tier to be set on the blob. + :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". + Required. :type tier: str or ~azure.storage.blob.models.AccessTierRequired :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. + blob. Known values are: "High" and "Standard". Default value is None. :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tier" - accept = "application/xml" - # Construct URL - url = self.set_tier.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_set_tier_request( + url=self._config.url, + tier=tier, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_tags=_if_tags, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + 
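
With the builder in place, every migrated coroutine reduces to the same three steps: build the request, resolve the URL template, run the pipeline. A compressed sketch under the assumption of an AsyncPipelineClient named client; the pylint escape hatch mirrors the generated code's own:

    from azure.core import AsyncPipelineClient
    from azure.core.rest import HttpRequest

    async def set_tier_sketch(client: AsyncPipelineClient, account_url: str) -> None:
        request = HttpRequest(
            "PUT",
            account_url,
            params={"comp": "tier"},                 # typed as Literal["tier"] in the patch
            headers={"x-ms-access-tier": "Cool"},
        )
        request.url = client.format_url(request.url)  # resolve {url}-style placeholders
        await client._pipeline.run(request, stream=False)  # pylint: disable=protected-access
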
_request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - - if response.status_code == 202: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_tier.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def get_account_info( - self, - **kwargs + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers["x-ms-client-request-id"] = 
self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def query( self, snapshot: Optional[str] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - query_request: Optional["_models.QueryRequest"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> IO: + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + query_request: Optional[_models.QueryRequest] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long """The Query operation enables users to select/project on blob data by providing simple query expressions. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param query_request: the query request. - :type query_request: ~azure.storage.blob.models.QueryRequest - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. 
+ :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :param query_request: the query request. Default value is None. + :type query_request: ~azure.storage.blob.models.QueryRequest + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "query" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.query.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 
'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + _if_unmodified_since = modified_access_conditions.if_unmodified_since if query_request is not None: - body_content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + _content = self._serialize.body(query_request, "QueryRequest", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + _content = None + + _request = build_query_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} if response.status_code == 200: - 
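
The non-2xx branch just above is new behavior for streaming operations: because query runs the pipeline with stream=True, the body must be drained before the mapped exception is raised, or the connection would be leaked. The same guard in isolation (both exception types come from azure.core.exceptions):

    from azure.core.exceptions import StreamClosedError, StreamConsumedError

    async def drain_before_raise(response) -> None:
        # Load the body into memory and release the socket; if the stream was
        # already consumed or closed there is nothing left to drain.
        try:
            await response.read()
        except (StreamConsumedError, StreamClosedError):
            pass
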
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = response.stream_download(self._client._pipeline) + 
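
Two type-string fixes run through the rewritten header handling that follows: x-ms-meta is now deserialized as '{str}' (a string-to-string map) rather than a single 'str', and the Python-2-era 'long' becomes 'int' for Content-Length and x-ms-blob-sequence-number. In plain-Python terms (illustrative values only):

    # '{str}' -> a {str: str} map, the shape of x-ms-meta-* metadata headers
    raw_meta = {"project": "eventhub"}            # hypothetical metadata
    meta = {key: str(value) for key, value in raw_meta.items()}

    # 'int' replaces 'long': Python 3 has a single integer type
    content_length = int("1024")
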
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", 
response.headers.get("x-ms-blob-content-md5") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - 
response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + 
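
Past the header bookkeeping, the rewritten query no longer hands back msrest's IO; it returns AsyncIterator[bytes] from stream_download, honoring a decompress kwarg. A hedged consumption sketch, where blob_ops (a wired generated-operations object) and request_model (a populated QueryRequest) are assumptions, not part of this patch:

    async def run_query(blob_ops, request_model) -> bytes:
        body = bytearray()
        stream = await blob_ops.query(query_request=request_model)
        async for chunk in stream:        # AsyncIterator[bytes] per the new signature
            body.extend(chunk)
        return bytes(body)
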
response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - query.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def get_tags( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, snapshot: Optional[str] = None, version_id: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs - ) -> "_models.BlobTags": + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> _models.BlobTags: + # pylint: disable=line-too-long """The Get Tags operation enables users to get the tags associated with a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlobTags, or the result of cls(response) + :return: BlobTags or the result of cls(response) :rtype: ~azure.storage.blob.models.BlobTags - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - accept = "application/xml" + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_get_tags_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - 
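
get_tags follows the same rebuild and, as the hunk continues below, deserializes its model from pipeline_response.http_response rather than the raw pipeline response. A hedged round-trip sketch; blob_ops is again an assumed, authenticated generated-operations object:

    async def print_tags(blob_ops) -> None:
        tags = await blob_ops.get_tags()           # -> _models.BlobTags
        # BlobTags carries its entries in .blob_tag_set, a list of BlobTag(key, value)
        for tag in tags.blob_tag_set or []:
            print(f"{tag.key} = {tag.value}")
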
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlobTags', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlobTags", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - async def set_tags( + @distributed_trace_async + async def set_tags( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, version_id: Optional[str] = None, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, request_id_parameter: Optional[str] = None, - tags: Optional["_models.BlobTags"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + tags: Optional[_models.BlobTags] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Set Tags operation enables users to set tags on a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. 
+ :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param tags: Blob tags. - :type tags: ~azure.storage.blob.models.BlobTags - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param tags: Blob tags. Default value is None. + :type tags: ~azure.storage.blob.models.BlobTags + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if 
transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if tags is not None: - body_content = self._serialize.body(tags, 'BlobTags', is_xml=True) + _content = self._serialize.body(tags, "BlobTags", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_tags_request( + url=self._config.url, + timeout=timeout, + version_id=version_id, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + request_id_parameter=request_id_parameter, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return 
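For orientation, a hedged usage sketch of the two tag operations above against the vendored async client. The import path and the assumption that `AzureBlobStorage` accepts the full blob URL (with SAS) are illustrative; they are not part of this diff.

```python
import asyncio

from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated.aio import AzureBlobStorage
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models


async def main() -> None:
    blob_url = "https://<account>.blob.core.windows.net/<container>/<blob>?<sas>"
    async with AzureBlobStorage(url=blob_url) as client:
        tags = models.BlobTags(blob_tag_set=[models.BlobTag(key="env", value="test")])
        await client.blob.set_tags(tags=tags)   # PUT <blob>?comp=tags, XML body
        fetched = await client.blob.get_tags()  # GET <blob>?comp=tags
        print({t.key: t.value for t in fetched.blob_tag_set})


asyncio.run(main())
```

Note that `tags` moved to the end of the `set_tags` signature: parameter groups now precede the optional body, so positional callers from the old generation need updating.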
cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py index 67c90b0bf9d4..45419c03cd47 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_block_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,115 +7,168 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._block_blob_operations import ( + build_commit_block_list_request, + build_get_block_list_request, + build_put_blob_from_url_request, + build_stage_block_from_url_request, + build_stage_block_request, + build_upload_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class BlockBlobOperations: - """BlockBlobOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class BlockBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`block_blob` attribute. 
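The `sys.version_info` gate added at the top of each regenerated module exists because `collections.abc.MutableMapping` only supports subscription (`MutableMapping[int, ...]`) at runtime from Python 3.9 (PEP 585); older interpreters must fall back to the `typing` alias, which is also why the `error_map` annotations carry the `unsubscriptable-object` pylint pragma:

```python
import sys

# PEP 585: collections.abc.MutableMapping is subscriptable only on 3.9+.
if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping
else:
    from typing import MutableMapping  # type: ignore
```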
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def upload( + @distributed_trace_async + async def upload( # pylint: disable=inconsistent-return-statements self, content_length: int, - body: IO, + body: IO[bytes], timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + transactional_content_md5: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[bytes] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Upload Block Blob operation updates the content of an existing block blob. Updating an existing block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a partial update of the content of a block blob, use the Put Block List operation. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param metadata: Optional. 
Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytes + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. 
:type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -124,6 +178,7 @@ async def upload( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -131,209 +186,215 @@ async def upload( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "BlockBlob" - content_type = kwargs.pop("content_type", 
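Note the parameter-group consolidation in this hunk: `encryption_algorithm` is no longer a loose keyword defaulting to `"AES256"`; it is read from the `CpkInfo` group together with the key and its SHA-256. A hedged sketch of building that group (same assumed vendored `models` import path as above):

```python
import base64
import hashlib
import os

from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob._generated import models

key = os.urandom(32)
cpk = models.CpkInfo(
    encryption_key=base64.b64encode(key).decode(),
    encryption_key_sha256=base64.b64encode(hashlib.sha256(key).digest()).decode(),
    encryption_algorithm="AES256",  # previously a standalone upload() kwarg
)
```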
"application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.upload.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - 
header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_upload_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + transactional_content_crc64=transactional_content_crc64, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + blob_type=blob_type, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - upload.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def put_blob_from_url( + @distributed_trace_async + async def put_blob_from_url( # pylint: disable=inconsistent-return-statements self, content_length: int, copy_source: str, timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + transactional_content_md5: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, blob_tags_string: Optional[str] = None, copy_source_blob_properties: Optional[bool] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + copy_source_authorization: Optional[str] = None, + copy_source_tags: 
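Callers of `upload` see two typed-surface changes: `body` is now `IO[bytes]` rather than bare `IO`, and `metadata` is a `dict[str, str]` instead of a pre-joined string. A hedged one-shot upload sketch (`upload_once` and the client wiring are illustrative):

```python
from io import BytesIO


async def upload_once(client: "AzureBlobStorage") -> None:
    data = b"checkpoint payload"
    await client.block_blob.upload(
        content_length=len(data),
        body=BytesIO(data),
        metadata={"owner": "eventhub"},  # dict[str, str] since this regeneration
        tier="Cool",                     # AccessTierOptional accepts the plain string
    )
```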
Optional[Union[str, _models.BlobCopySourceTags]] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are not supported with Put Blob from URL; the content of an existing blob is overwritten with the content of the new blob. To perform partial updates to a block blob’s contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. 
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + from the copy source. Default value is None. + :type source_content_md5: bytes + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str :param copy_source_blob_properties: Optional, default is true. Indicates if properties from - the source blob should be copied. + the source blob should be copied. Default value is None. :type copy_source_blob_properties: bool - :param blob_http_headers: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and + "COPY". Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -343,6 +404,7 @@ async def put_blob_from_url( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -355,343 +417,344 @@ async def put_blob_from_url( _source_if_none_match = None _source_if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags - blob_type = "BlockBlob" - 
accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_put_blob_from_url_request( + url=self._config.url, + content_length=content_length, + copy_source=copy_source, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.put_blob_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - 
header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _source_if_tags is not None: - header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if copy_source_blob_properties is not None: - header_parameters['x-ms-copy-source-blob-properties'] = self._serialize.header("copy_source_blob_properties", copy_source_blob_properties, 'bool') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - put_blob_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def stage_block( + @distributed_trace_async + async def stage_block( # pylint: disable=inconsistent-return-statements self, block_id: str, content_length: int, - body: IO, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, + body: 
IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - **kwargs + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the string must be less than or equal to 64 bytes in size. For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. + value specified for the blockid parameter must be the same size for each block. Required. :type block_id: str - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. 
+ :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - comp = "block" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.stage_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if 
_encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _content = body + + _request = build_stage_block_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', 
response.headers.get('x-ms-encryption-scope')) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - stage_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def stage_block_from_url( + @distributed_trace_async + async def stage_block_from_url( # pylint: disable=inconsistent-return-statements self, block_id: str, content_length: int, source_url: str, source_range: Optional[str] = None, - source_content_md5: Optional[bytearray] = None, - source_contentcrc64: Optional[bytearray] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob where the contents are read from a URL. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the string must be less than or equal to 64 bytes in size. For a given blob, the length of the - value specified for the blockid parameter must be the same size for each block. + value specified for the blockid parameter must be the same size for each block. Required. :type block_id: str - :param content_length: The length of the request. - :type content_length: long - :param source_url: Specify a URL to the copy source. + :param content_length: The length of the request. Required. + :type content_length: int + :param source_url: Specify a URL to the copy source. Required. :type source_url: str - :param source_range: Bytes of source data in the specified range. 
+ :param source_range: Bytes of source data in the specified range. Default value is None. :type source_range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. Default value is None. + :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
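The new copy_source_authorization parameter documented above surfaces on the public wrapper as the source_authorization keyword (available in recent azure-storage-blob releases). A hedged sketch; the token, offset, and length values are placeholders:

    import base64

    from azure.storage.blob.aio import BlobClient


    async def stage_from_source(dest: BlobClient, source_url: str, oauth_token: str) -> None:
        # Only "Bearer <token>" is accepted for the copy-source authorization header.
        await dest.stage_block_from_url(
            block_id=base64.b64encode(b"000000").decode("utf-8"),
            source_url=source_url,
            source_offset=0,
            source_length=4 * 1024 * 1024,  # stage the first 4 MiB of the source
            source_authorization=f"Bearer {oauth_token}",
        )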
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _source_if_modified_since = None @@ -699,6 +762,7 @@ async def stage_block_from_url( _source_if_match = None _source_if_none_match = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: @@ -706,103 +770,96 @@ async def stage_block_from_url( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "block" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_stage_block_from_url_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + source_url=source_url, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.stage_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', 
response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def commit_block_list( + @distributed_trace_async + async def commit_block_list( # pylint: disable=inconsistent-return-statements self, - blocks: "_models.BlockLookupList", + blocks: _models.BlockLookupList, timeout: Optional[int] = None, - transactional_content_md5: Optional[bytearray] = None, - transactional_content_crc64: Optional[bytearray] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = 
None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. In order to be written as part of a blob, a block must have been successfully written to the server in a prior Put Block operation. You can call Put Block List to update a blob by @@ -811,58 +868,75 @@ async def commit_block_list( or from the uncommitted block list, or to commit the most recently uploaded version of the block, whichever list it may belong to. - :param blocks: + :param blocks: Blob Blocks. Required. :type blocks: ~azure.storage.blob.models.BlockLookupList :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. 
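Once blocks are staged, committing publishes them in order; as the docstring notes, the service resolves each listed ID against the committed or uncommitted list, preferring the most recently uploaded version of the block. A sketch via the public wrapper, which also shows metadata as the Dict[str, str] this regeneration switches to (the metadata values are placeholders):

    from typing import List

    from azure.storage.blob import BlobBlock
    from azure.storage.blob.aio import BlobClient


    async def finalize_blob(blob: BlobClient, block_ids: List[str]) -> None:
        block_list = [BlobBlock(block_id=block_id) for block_id in block_ids]
        # metadata is now typed as Dict[str, str] rather than a
        # pre-serialized header string.
        await blob.commit_block_list(block_list, metadata={"source": "checkpoint-upload"})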
:type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_cache_control = None _blob_content_type = None _blob_content_encoding = None @@ -872,6 +946,7 @@ async def commit_block_list( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -880,224 +955,210 @@ async def commit_block_list( _if_tags = None if blob_http_headers is not None: _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_disposition = 
blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.commit_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", 
_encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = self._serialize.body(blocks, "BlockLookupList", is_xml=True) + + _request = build_commit_block_list_request( + url=self._config.url, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - commit_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def get_block_list( self, snapshot: Optional[str] = None, - list_type: Union[str, "_models.BlockListType"] = "committed", + list_type: 
Union[str, _models.BlockListType] = "committed", timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> "_models.BlockList": + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.BlockList: + # pylint: disable=line-too-long """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param list_type: Specifies whether to return the list of committed blocks, the list of - uncommitted blocks, or both lists together. + uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and + "all". Default value is "committed". :type list_type: str or ~azure.storage.blob.models.BlockListType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
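A sketch of how the list_type values documented above behave through the public wrapper: "all" returns both lists as a (committed, uncommitted) tuple, while "committed" (the default) and "uncommitted" narrow the result.

    from azure.storage.blob.aio import BlobClient


    async def inspect_blocks(blob: BlobClient) -> None:
        committed, uncommitted = await blob.get_block_list(block_list_type="all")
        print(f"{len(committed)} committed / {len(uncommitted)} uncommitted blocks")
        for block in committed:
            print(block.id, block.size)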
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlockList, or the result of cls(response) + :return: BlockList or the result of cls(response) :rtype: ~azure.storage.blob.models.BlockList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlockList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + cls: ClsType[_models.BlockList] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - accept = "application/xml" - # Construct URL - url = self.get_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - query_parameters['blocklisttype'] = self._serialize.query("list_type", list_type, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_block_list_request( + url=self._config.url, + snapshot=snapshot, + list_type=list_type, + timeout=timeout, + lease_id=_lease_id, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response 
= pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlockList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlockList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_container_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_container_operations.py index ed32bc96241b..c2bc375a197f 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_container_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_container_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,56 +6,91 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
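The container-operations hunk that follows applies the same regeneration pattern seen above: inline URL, query, and header construction is replaced by module-level build_*_request helpers imported from the sync operations module and shared with the async client. A much-simplified sketch of that builder shape, assuming only azure.core.rest.HttpRequest; the real generated builders serialize every parameter and leave {url} templating to the caller:

    from typing import Any, Dict, Optional

    from azure.core.rest import HttpRequest


    def build_container_create_request_sketch(
        url: str, *, version: str, timeout: Optional[int] = None, **kwargs: Any
    ) -> HttpRequest:
        params: Dict[str, str] = {"restype": "container"}
        if timeout is not None:
            params["timeout"] = str(timeout)
        headers: Dict[str, str] = {"x-ms-version": version, "Accept": "application/xml"}
        # The caller formats the {url} template afterwards, mirroring
        # `_request.url = self._client.format_url(_request.url)` above.
        return HttpRequest(method="PUT", url=url, params=params, headers=headers, **kwargs)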
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, AsyncIterator, Callable, Dict, IO, List, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._container_operations import ( + build_acquire_lease_request, + build_break_lease_request, + build_change_lease_request, + build_create_request, + build_delete_request, + build_filter_blobs_request, + build_get_access_policy_request, + build_get_account_info_request, + build_get_properties_request, + build_list_blob_flat_segment_request, + build_list_blob_hierarchy_segment_request, + build_release_lease_request, + build_rename_request, + build_renew_lease_request, + build_restore_request, + build_set_access_policy_request, + build_set_metadata_request, + build_submit_batch_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ContainerOperations: - """ContainerOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ContainerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`container` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def create( + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, - access: Optional[Union[str, "_models.PublicAccessType"]] = None, + metadata: Optional[Dict[str, str]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, request_id_parameter: Optional[str] = None, - container_cpk_scope_info: Optional["_models.ContainerCpkScopeInfo"] = None, - **kwargs + container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """creates a new container under the specified account. If the container with the same name already exists, the operation fails. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -62,207 +98,227 @@ async def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. :type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_cpk_scope_info: Parameter group. + :param container_cpk_scope_info: Parameter group. Default value is None. 
:type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _default_encryption_scope = None _prevent_encryption_scope_override = None if container_cpk_scope_info is not None: _default_encryption_scope = container_cpk_scope_info.default_encryption_scope _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override - restype = "container" - accept = "application/xml" - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _default_encryption_scope is not None: - header_parameters['x-ms-default-encryption-scope'] = self._serialize.header("default_encryption_scope", _default_encryption_scope, 'str') - if _prevent_encryption_scope_override is not None: - header_parameters['x-ms-deny-encryption-scope-override'] = self._serialize.header("prevent_encryption_scope_override", _prevent_encryption_scope_override, 'bool') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_create_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + access=access, + request_id_parameter=request_id_parameter, + default_encryption_scope=_default_encryption_scope, + prevent_encryption_scope_override=_prevent_encryption_scope_override, + restype=restype, + 
version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - create.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def get_properties( + @distributed_trace_async + async def get_properties( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
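The x-ms-* response headers deserialized below surface as typed attributes on the public wrapper's ContainerProperties; a minimal sketch:

    from azure.storage.blob.aio import ContainerClient


    async def show_container_properties(container: ContainerClient) -> None:
        props = await container.get_container_properties()
        # Each attribute maps onto one of the response headers handled below,
        # e.g. lease state/status and the immutability and legal-hold flags.
        print(props.last_modified, props.lease.state, props.lease.status)
        print(props.has_immutability_policy, props.has_legal_hold)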
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - accept = "application/xml" - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) - response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) - response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) - response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["x-ms-has-immutability-policy"] = self._deserialize( + "bool", response.headers.get("x-ms-has-immutability-policy") + ) + response_headers["x-ms-has-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-has-legal-hold")) + response_headers["x-ms-default-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-default-encryption-scope") + ) + response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( + "bool", response.headers.get("x-ms-deny-encryption-scope-override") + ) + response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( + "bool", response.headers.get("x-ms-immutable-storage-with-versioning-enabled") + ) if cls: - return cls(pipeline_response, None, response_headers) - - get_properties.metadata = {'url': '/{containerName}'} # type: ignore + return 
cls(pipeline_response, None, response_headers) # type: ignore - async def delete( + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """operation marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -271,69 +327,61 @@ async def delete( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - accept = "application/xml" - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = 
self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_delete_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def set_metadata( + @distributed_trace_async + async def set_metadata( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - metadata: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - 
modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """operation sets one or more user-defined name-value pairs for the specified container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -341,210 +389,226 @@ async def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
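
Each regenerated operation above now types its error_map as MutableMapping[int, Type[HttpResponseError]] and adds a 304 -> ResourceNotModifiedError entry. A self-contained sketch of that dispatch follows; the exception classes are simplified stand-ins for the azure.core.exceptions hierarchy, not the real ones.

# Sketch of the error_map dispatch used by the regenerated operations.
from typing import MutableMapping, Type

class HttpResponseError(Exception): ...
class ClientAuthenticationError(HttpResponseError): ...
class ResourceNotFoundError(HttpResponseError): ...
class ResourceExistsError(HttpResponseError): ...
class ResourceNotModifiedError(HttpResponseError): ...

error_map: MutableMapping[int, Type[HttpResponseError]] = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
    304: ResourceNotModifiedError,  # newly mapped so conditional-request probes raise a typed error
}

def map_error_demo(status_code: int) -> None:
    # Unknown status codes fall through to the generic HttpResponseError,
    # mirroring how the generated code raises after map_error returns.
    raise error_map.get(status_code, HttpResponseError)(f"HTTP {status_code}")

try:
    map_error_demo(304)
except ResourceNotModifiedError as exc:
    print(type(exc).__name__, exc)
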
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since - restype = "container" - comp = "metadata" - accept = "application/xml" - - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_set_metadata_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + metadata=metadata, + if_modified_since=_if_modified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse 
= await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_metadata.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def get_access_policy( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - **kwargs - ) -> List["_models.SignedIdentifier"]: + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> List[_models.SignedIdentifier]: + # pylint: disable=line-too-long """gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
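
With set_metadata now typed as Dict[str, str], a caller passes metadata as a plain name/value mapping. An illustrative sketch using the public azure-storage-blob async client rather than this vendored copy; the connection string and all names are placeholders.

# Illustrative only: metadata is passed as Dict[str, str].
import asyncio

from azure.storage.blob.aio import ContainerClient

async def main() -> None:
    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="checkpoints"
    )
    async with container:
        # Metadata is a plain name/value mapping, not a pre-serialized string.
        await container.set_container_metadata({"owner": "eventhub", "env": "test"})
        props = await container.get_container_properties()
        print(props.metadata)

asyncio.run(main())
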
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: list of SignedIdentifier, or the result of cls(response) + :return: list of SignedIdentifier or the result of cls(response) :rtype: list[~azure.storage.blob.models.SignedIdentifier] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - comp = "acl" - accept = "application/xml" - - # Construct URL - url = self.get_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_get_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) 
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - async def set_access_policy( + @distributed_trace_async + async def set_access_policy( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, - access: Optional[Union[str, "_models.PublicAccessType"]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, request_id_parameter: Optional[str] = None, - container_acl: Optional[List["_models.SignedIdentifier"]] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + container_acl: Optional[List[_models.SignedIdentifier]] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """sets the permissions for the specified container. The permissions indicate whether blobs in a container may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. 
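
get_access_policy above deserializes the body into a list of SignedIdentifier models, and set_access_policy serializes one back. A hedged sketch of the same round trip through the public async client, where signed_identifiers maps policy IDs to AccessPolicy objects; the connection string and times are placeholders.

# Illustrative access-policy round trip via the public client.
import asyncio
from datetime import datetime, timedelta, timezone

from azure.storage.blob import AccessPolicy, ContainerSasPermissions
from azure.storage.blob.aio import ContainerClient

async def main() -> None:
    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="checkpoints"
    )
    async with container:
        now = datetime.now(timezone.utc)
        policy = AccessPolicy(
            permission=ContainerSasPermissions(read=True),
            start=now,
            expiry=now + timedelta(hours=1),
        )
        # signed_identifiers maps policy IDs to AccessPolicy objects; the
        # service stores them as the SignedIdentifier list seen above.
        await container.set_container_access_policy(signed_identifiers={"read-1h": policy})
        acl = await container.get_container_access_policy()
        print(acl["public_access"], acl["signed_identifiers"])

asyncio.run(main())
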
:type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_acl: the acls for the container. - :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param container_acl: the acls for the container. Default value is None. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -553,911 +617,1199 @@ async def set_access_policy( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - comp = "acl" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = 
self._serialize.header("lease_id", _lease_id, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} if container_acl is not None: - body_content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + _content = self._serialize.body( + container_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt + ) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + access=access, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_access_policy.metadata = {'url': '/{containerName}'} # type: ignore - - async def restore( + @distributed_trace_async + async def restore( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, deleted_container_name: Optional[str] = None, deleted_container_version: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Restores a previously-deleted container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of - the deleted container to restore. + the deleted container to restore. Default value is None. :type deleted_container_name: str :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the - version of the deleted container to restore. + version of the deleted container to restore. Default value is None. 
:type deleted_container_version: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.restore.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if deleted_container_name is not None: - header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str') - if deleted_container_version is not None: - header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_restore_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def rename( # pylint: disable=inconsistent-return-statements + self, + source_container_name: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + source_lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """Renames an existing container. + + :param source_container_name: Specifies the name of the container to rename. Required. + :type source_container_name: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_lease_id: A lease ID for the source path. If specified, the source path must have + an active lease and the lease ID must match. Default value is None.
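
The error path above switches from a strict deserialize call to failsafe_deserialize: a best-effort parse that never raises, so the typed HttpResponseError is still surfaced even when the error body is malformed. A self-contained sketch of that idea; parse_error_code is a hypothetical parser, not the real msrest deserializer.

# Self-contained sketch of failsafe deserialization: swallow parse failures
# and return None so the HTTP error is raised regardless.
from typing import Callable, Optional, TypeVar

T = TypeVar("T")

def failsafe_deserialize(parse: Callable[[str], T], body: str) -> Optional[T]:
    try:
        return parse(body)
    except Exception:  # deliberately swallow parse failures
        return None

def parse_error_code(body: str) -> str:
    # Hypothetical parser: expects "<Code>...</Code>" somewhere in the body.
    start = body.index("<Code>") + len("<Code>")
    return body[start : body.index("</Code>")]

print(failsafe_deserialize(parse_error_code, "<Code>ContainerBeingDeleted</Code>"))
print(failsafe_deserialize(parse_error_code, "not xml at all"))  # -> None
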
+ :type source_lease_id: str + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_rename_request( + url=self._config.url, + source_container_name=source_container_name, + timeout=timeout, + request_id_parameter=request_id_parameter, + source_lease_id=source_lease_id, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def submit_batch( + self, + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - restore.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - async def acquire_lease( + @distributed_trace_async + async def filter_blobs( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long + 
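
submit_batch above streams back a multipart response covering multiple embedded sub-requests. For illustration, the public async ContainerClient drives this endpoint through delete_blobs, which packs each name into one sub-request of a single batch POST; the connection string and blob names are placeholders.

# Illustrative only: batch deletion via the public client.
import asyncio

from azure.storage.blob.aio import ContainerClient

async def main() -> None:
    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="checkpoints"
    )
    async with container:
        # Each name becomes one sub-request in the single batch request.
        await container.delete_blobs("ownership/0", "ownership/1", "checkpoint/0")

asyncio.run(main())
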
"""The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. Filter blobs searches within the given container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param where: Filters the results to return only to return only blobs whose tags match the + specified expression. Default value is None. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. 
+ :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def acquire_lease( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, duration: Optional[int] = None, proposed_lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. 
+ duration cannot be changed using renew or change. Default value is None. :type duration: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Default value is None. :type proposed_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "acquire" - accept = "application/xml" - - # Construct URL - url = self.acquire_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if duration is not None: - header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') - if proposed_lease_id is not None: - 
header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': 
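Since the regenerated async `acquire_lease` returns `None` and surfaces the lease ID only through response headers, a caller needs the `cls` response hook to capture `x-ms-lease-id`. A minimal sketch, assuming `client` is an instance of the vendored generated `AzureBlobStorage` client and that the operation group hangs off it as `client.container` (an assumption about the generated client's attribute layout):

import uuid

async def acquire_container_lease(client) -> str:
    """Acquire a 15-second container lease and return its ID (illustrative sketch)."""
    captured = {}

    def on_response(pipeline_response, deserialized, response_headers):
        # deserialized is None for this operation; the lease ID arrives in the headers.
        captured.update(response_headers)

    await client.container.acquire_lease(
        duration=15,                          # 15-60 seconds, or -1 for an infinite lease
        proposed_lease_id=str(uuid.uuid4()),  # must be a valid GUID string
        cls=on_response,
    )
    return captured["x-ms-lease-id"]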
'/{containerName}'} # type: ignore - - async def release_lease( + @distributed_trace_async + async def release_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = 
self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return 
cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def renew_lease( + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = 
self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_renew_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", 
response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - renew_lease.metadata = {'url': '/{containerName}'} # type: ignore - - async def break_lease( + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, break_period: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a fixed- - duration lease breaks after the remaining lease period elapses, and an infinite lease breaks - immediately. + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. :type break_period: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "break" - accept = "application/xml" - - # Construct URL - url = self.break_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if break_period is not None: - header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_break_lease_request( + url=self._config.url, + timeout=timeout, + break_period=break_period, + 
if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - break_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def change_lease( + @distributed_trace_async + async def change_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, proposed_lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. 
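`break_lease` reports the remaining lease time through the `x-ms-lease-time` header rather than a response body, so it pairs naturally with the same `cls` hook used above. A short sketch:

async def break_container_lease(client) -> int:
    """Break the current lease, allowing up to 10 more seconds; return the seconds left."""
    captured = {}
    await client.container.break_lease(
        break_period=10,  # 0-60 seconds; the shorter of this and the remaining time applies
        cls=lambda resp, body, headers: captured.update(headers),
    )
    return captured["x-ms-lease-time"]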
+ Constructor (String) for a list of valid GUID string formats. Required. :type proposed_lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "change" - accept = "application/xml" - - # Construct URL - url = self.change_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_change_lease_request( + url=self._config.url, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - change_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def list_blob_flat_segment( self, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - 
include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.ListBlobsFlatSegmentResponse": + **kwargs: Any + ) -> _models.ListBlobsFlatSegmentResponse: + # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify one or more datasets to include in the - response. + response. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListBlobsFlatSegmentResponse, or the result of cls(response) + :return: ListBlobsFlatSegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsFlatSegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_blob_flat_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_blob_flat_segment_request( + url=self._config.url, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_blob_flat_segment.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def list_blob_hierarchy_segment( self, delimiter: str, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.ListBlobsHierarchySegmentResponse": + **kwargs: Any + ) -> _models.ListBlobsHierarchySegmentResponse: + # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose names begin with the same substring up to the appearance of the delimiter character. The delimiter may be a - single character or a string. + single character or a string. Required. :type delimiter: str :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. 
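The flat-listing operation is page-oriented: the `next_marker` of one response feeds the `marker` of the next call. A sketch that drains all pages (attribute names follow the generated `ListBlobsFlatSegmentResponse` model):

from typing import Optional

async def iter_blob_names(client, prefix: Optional[str] = None):
    marker = None
    while True:
        page = await client.container.list_blob_flat_segment(
            prefix=prefix,
            marker=marker,
            maxresults=1000,  # the server caps any single page at 5000 items
            include=["metadata", "snapshots"],
        )
        for blob in page.segment.blob_items:
            yield blob.name
        marker = page.next_marker
        if not marker:
            break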
The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify one or more datasets to include in the - response. + response. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :return: ListBlobsHierarchySegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_blob_hierarchy_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - query_parameters['delimiter'] = self._serialize.query("delimiter", delimiter, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = 
self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_blob_hierarchy_segment_request( + url=self._config.url, + delimiter=delimiter, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return 
deserialized - list_blob_hierarchy_segment.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - async def get_account_info( - self, - **kwargs + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if 
response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_directory_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_directory_operations.py deleted file mode 100644 index 338ff69e3adc..000000000000 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_directory_operations.py +++ /dev/null @@ -1,739 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest - -from ... 
import models as _models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DirectoryOperations: - """DirectoryOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def create( - self, - timeout: Optional[int] = None, - directory_properties: Optional[str] = None, - posix_permissions: Optional[str] = None, - posix_umask: Optional[str] = None, - request_id_parameter: Optional[str] = None, - directory_http_headers: Optional["_models.DirectoryHttpHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> None: - """Create a directory. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. - :type posix_umask: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param directory_http_headers: Parameter group. 
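(For reference, the conditional-create idiom this removed docstring describes, failing when the destination already exists via If-None-Match: "*", boils down to the following sketch; create_if_absent, directory_ops, and the models argument are illustrative names, and a precondition failure is assumed to surface as HTTP 412.)

    from azure.core.exceptions import HttpResponseError

    async def create_if_absent(directory_ops, models) -> bool:
        try:
            await directory_ops.create(
                modified_access_conditions=models.ModifiedAccessConditions(if_none_match="*"),
            )
            return True
        except HttpResponseError as e:
            if e.status_code == 412:  # precondition failed: destination already exists
                return False
            raise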
- :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding = directory_http_headers.content_encoding - _content_language = directory_http_headers.content_language - _content_disposition = directory_http_headers.content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - resource = "directory" - accept = "application/xml" - - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['resource'] = self._serialize.query("resource", resource, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if directory_properties is not None: - header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_umask is not None: - header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') - if _cache_control is not None: - header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str') - if _content_type is not None: - header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str') - if _content_encoding is not None: - header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') - if _content_language is not None: - header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') - if _content_disposition is not None: - header_parameters['x-ms-content-disposition'] = 
self._serialize.header("content_disposition", _content_disposition, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - - if cls: - return cls(pipeline_response, None, response_headers) - - create.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - async def rename( - self, - rename_source: str, - timeout: Optional[int] = None, - marker: Optional[str] = None, - path_rename_mode: Optional[Union[str, "_models.PathRenameMode"]] = None, - directory_properties: Optional[str] = None, - posix_permissions: Optional[str] = None, - posix_umask: Optional[str] = None, - source_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - directory_http_headers: Optional["_models.DirectoryHttpHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, - **kwargs - ) -> None: - """Rename a directory. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. 
To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param rename_source: The file or directory to be renamed. The value must have the following - format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties will - overwrite the existing properties; otherwise, the existing properties will be preserved. - :type rename_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param marker: When renaming a directory, the number of paths that are renamed with each - invocation is limited. If the number of paths to be renamed exceeds this limit, a continuation - token is returned in this response header. When a continuation token is returned in the - response, it must be specified in a subsequent invocation of the rename operation to continue - renaming the directory. - :type marker: str - :param path_rename_mode: Determines the behavior of the rename operation. - :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. - :type posix_umask: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. - :type source_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param directory_http_headers: Parameter group. - :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group.
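(The marker handshake described above, where each response's x-ms-continuation value feeds the next call, amounts to the following sketch; rename_all and directory_ops are illustrative names, and the headers are surfaced through the documented cls callback.)

    async def rename_all(directory_ops, source: str) -> None:
        marker = None
        while True:
            headers = await directory_ops.rename(
                rename_source=source,
                marker=marker,
                cls=lambda resp, deserialized, hdrs: hdrs,  # return the raw response headers
            )
            marker = headers.get("x-ms-continuation")
            if not marker:  # no continuation token: the rename is complete
                break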
- :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding = directory_http_headers.content_encoding - _content_language = directory_http_headers.content_language - _content_disposition = directory_http_headers.content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - accept = "application/xml" - - # Construct URL - url = self.rename.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if marker is not None: - query_parameters['continuation'] = self._serialize.query("marker", marker, 'str') - if path_rename_mode is not None: - query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str') - if directory_properties is not None: - header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_umask is not None: - header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') - if _cache_control is not None: - header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str') - if _content_type is not None: - header_parameters['x-ms-content-type'] 
= self._serialize.header("content_type", _content_type, 'str') - if _content_encoding is not None: - header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') - if _content_language is not None: - header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') - if _content_disposition is not None: - header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if source_lease_id is not None: - header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - 
response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - - if cls: - return cls(pipeline_response, None, response_headers) - - rename.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - async def delete( - self, - recursive_directory_delete: bool, - timeout: Optional[int] = None, - marker: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> None: - """Deletes the directory. - - :param recursive_directory_delete: If "true", all paths beneath the directory will be deleted. - If "false" and the directory is non-empty, an error occurs. - :type recursive_directory_delete: bool - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param marker: When deleting a directory, the number of paths that are deleted with each - invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation - token is returned in this response header. When a continuation token is returned in the - response, it must be specified in a subsequent invocation of the delete operation to continue - deleting the directory. - :type marker: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group.
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - accept = "application/xml" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - query_parameters['recursive'] = self._serialize.query("recursive_directory_delete", recursive_directory_delete, 'bool') - if marker is not None: - query_parameters['continuation'] = self._serialize.query("marker", marker, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation')) - response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - - if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - async def set_access_control( - self, - timeout: Optional[int] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - posix_permissions: Optional[str] = None, - posix_acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> None: - """Set the owner, group, permissions, or access control list for a directory. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param owner: Optional. The owner of the blob or directory. - :type owner: str - :param group: Optional. The owning group of the blob or directory. - :type group: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". - :type posix_acl: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
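(The "[scope:][type]:[id]:[permissions]" grammar above composes comma-separated access control entries such as the following; the identifier is an illustrative placeholder.)

    acl = ",".join([
        "user::rwx",                                      # owning user
        "user:00000000-0000-0000-0000-000000000000:r--",  # named user by AAD object ID
        "group::r-x",                                     # owning group
        "other::---",
        "default:user::rwx",                              # default-scope entry
    ])
    # passed through unchanged as the x-ms-acl header:
    # await directory_ops.set_access_control(posix_acl=acl)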
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "setAccessControl" - accept = "application/xml" - - # Construct URL - url = self.set_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if owner is not None: - header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str') - if group is not None: - header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_acl is not None: - header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.patch(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - - if cls: - return cls(pipeline_response, None, response_headers) - - set_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - async def get_access_control( - self, - timeout: Optional[int] = None, - upn: Optional[bool] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> None: - """Get the owner, group, permissions, or access control list for a directory. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If - "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. - :type upn: bool - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
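(Because this operation, too, returns everything through response headers, reading the result back follows the same cls pattern; read_access_control and directory_ops are illustrative names.)

    async def read_access_control(directory_ops):
        headers = await directory_ops.get_access_control(
            upn=True,  # translate AAD object IDs to user principal names, per the upn docs above
            cls=lambda resp, deserialized, hdrs: hdrs,
        )
        return headers["x-ms-owner"], headers["x-ms-group"], headers["x-ms-acl"]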
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "getAccessControl" - accept = "application/xml" - - # Construct URL - url = self.get_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if upn is not None: - query_parameters['upn'] = self._serialize.query("upn", upn, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', 
response.headers.get('Last-Modified')) - response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner')) - response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group')) - response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions')) - response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - - if cls: - return cls(pipeline_response, None, response_headers) - - get_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py index 100f730254ed..38e66803e85b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_page_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,70 +7,98 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models - -T = TypeVar('T') +from ...operations._page_blob_operations import ( + build_clear_pages_request, + build_copy_incremental_request, + build_create_request, + build_get_page_ranges_diff_request, + build_get_page_ranges_request, + build_resize_request, + build_update_sequence_number_request, + build_upload_pages_from_url_request, + build_upload_pages_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PageBlobOperations: - """PageBlobOperations async operations. - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class PageBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`page_blob` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def create( + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements self, content_length: int, blob_content_length: int, timeout: Optional[int] = None, - tier: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] = None, - metadata: Optional[str] = None, - encryption_algorithm: Optional[str] = "AES256", - blob_sequence_number: Optional[int] = 0, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + metadata: Optional[Dict[str, str]] = None, + blob_sequence_number: int = 0, request_id_parameter: Optional[str] = None, blob_tags_string: Optional[str] = None, - blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Create operation creates a new page blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. - :type blob_content_length: long + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :type blob_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. 
Default value is None. :type timeout: int - :param tier: Optional. Indicates the tier to be set on the page blob. + :param tier: Optional. Indicates the tier to be set on the page blob. Known values are: "P4", + "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None. :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -77,42 +106,55 @@ async def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. - :type blob_sequence_number: long + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
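(The 512-byte alignment requirement on blob_content_length noted above is usually handled by rounding up; a minimal sketch with illustrative names:)

    PAGE_SIZE = 512

    def round_up_to_page(n: int) -> int:
        # smallest multiple of 512 that is >= n
        return (n + PAGE_SIZE - 1) // PAGE_SIZE * PAGE_SIZE

    # e.g. a page blob sized for 1000 bytes of payload:
    # await page_blob_ops.create(content_length=0, blob_content_length=round_up_to_page(1000))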
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -122,6 +164,7 @@ async def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -129,183 +172,184 @@ async def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "PageBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + blob_content_length=blob_content_length, + timeout=timeout, + tier=tier, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + 
            blob_content_disposition=_blob_content_disposition,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            encryption_scope=_encryption_scope,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            blob_sequence_number=blob_sequence_number,
+            request_id_parameter=request_id_parameter,
+            blob_tags_string=blob_tags_string,
+            immutability_policy_expiry=immutability_policy_expiry,
+            immutability_policy_mode=immutability_policy_mode,
+            legal_hold=legal_hold,
+            blob_type=blob_type,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
-        # Construct URL
-        url = self.create.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str')
-        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
-        if tier is not None:
-            header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str')
-        if _blob_content_type is not None:
-            header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str')
-        if _blob_content_encoding is not None:
-            header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str')
-        if _blob_content_language is not None:
-            header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str')
-        if _blob_content_md5 is not None:
-            header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray')
-        if _blob_cache_control is not None:
-            header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str')
-        if metadata is not None:
-            header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _blob_content_disposition is not None:
-            header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str')
-        if _encryption_key is not None:
-            header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str')
-        if _encryption_key_sha256 is not None:
-            header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str')
-        if encryption_algorithm is not None:
-            header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long')
-        if blob_sequence_number is not None:
-            header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        if blob_tags_string is not None:
-            header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    create.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
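For orientation, a minimal usage sketch (an editor's illustration under stated assumptions, not part of the vendored diff): the generated create above is what the public create_page_blob helper ultimately drives, assuming the vendored copy keeps the public azure-storage-blob surface and that the import path below matches this package's _vendor layout.

import asyncio

# Import path assumed from the _vendor layout shown in this patch.
from azure.eventhub.extensions.checkpointstoreblobaio._vendor.storage.blob.aio import BlobClient

async def main() -> None:
    # Placeholder connection string and names; page blobs are sized in 512-byte multiples.
    blob = BlobClient.from_connection_string(
        "<storage-connection-string>", container_name="checkpoints", blob_name="pages.bin"
    )
    async with blob:
        await blob.create_page_blob(size=1024 * 1024)  # funnels into the generated create()

asyncio.run(main())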
-    async def upload_pages(
+    @distributed_trace_async
+    async def upload_pages(  # pylint: disable=inconsistent-return-statements
         self,
         content_length: int,
-        body: IO,
-        transactional_content_md5: Optional[bytearray] = None,
-        transactional_content_crc64: Optional[bytearray] = None,
+        body: IO[bytes],
+        transactional_content_md5: Optional[bytes] = None,
+        transactional_content_crc64: Optional[bytes] = None,
         timeout: Optional[int] = None,
         range: Optional[str] = None,
-        encryption_algorithm: Optional[str] = "AES256",
         request_id_parameter: Optional[str] = None,
-        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
-        cpk_info: Optional["_models.CpkInfo"] = None,
-        cpk_scope_info: Optional["_models.CpkScopeInfo"] = None,
-        sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None,
-        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
-        **kwargs
+        structured_body_type: Optional[str] = None,
+        structured_content_length: Optional[int] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
     ) -> None:
+        # pylint: disable=line-too-long
         """The Upload Pages operation writes a range of pages to a page blob.

-        :param content_length: The length of the request.
-        :type content_length: long
-        :param body: Initial data.
-        :type body: IO
+        :param content_length: The length of the request. Required.
+        :type content_length: int
+        :param body: Initial data. Required.
+        :type body: IO[bytes]
         :param transactional_content_md5: Specify the transactional md5 for the body, to be validated
-         by the service.
-        :type transactional_content_md5: bytearray
+         by the service. Default value is None.
+        :type transactional_content_md5: bytes
         :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
-         validated by the service.
-        :type transactional_content_crc64: bytearray
+         validated by the service. Default value is None.
+        :type transactional_content_crc64: bytes
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param range: Return only the bytes of the blob in the specified range.
+        :param range: Return only the bytes of the blob in the specified range. Default value is None.
         :type range: str
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-         the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-         provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param structured_body_type: Required if the request body is a structured message. Specifies
+         the message schema version and properties. Default value is None.
+        :type structured_body_type: str
+        :param structured_content_length: Required if the request body is a structured message.
+         Specifies the length of the blob/file content inside the message body. Will always be smaller
+         than Content-Length. Default value is None.
+        :type structured_content_length: int
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_info: Parameter group.
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param sequence_number_access_conditions: Parameter group.
-        :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param sequence_number_access_conditions: Parameter group. Default value is None.
+        :type sequence_number_access_conditions:
+         ~azure.storage.blob.models.SequenceNumberAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
+        page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update"))
+        content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _lease_id = None
         _encryption_key = None
         _encryption_key_sha256 = None
+        _encryption_algorithm = None
         _encryption_scope = None
         _if_sequence_number_less_than_or_equal_to = None
         _if_sequence_number_less_than = None
@@ -315,168 +359,169 @@ async def upload_pages(
         _if_match = None
         _if_none_match = None
         _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
         if cpk_info is not None:
+            _encryption_algorithm = cpk_info.encryption_algorithm
             _encryption_key = cpk_info.encryption_key
             _encryption_key_sha256 = cpk_info.encryption_key_sha256
         if cpk_scope_info is not None:
             _encryption_scope = cpk_scope_info.encryption_scope
-        if lease_access_conditions is not None:
-            _lease_id = lease_access_conditions.lease_id
+        if sequence_number_access_conditions is not None:
+            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
+            _if_sequence_number_less_than_or_equal_to = (
+                sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
+            )
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        if sequence_number_access_conditions is not None:
-            _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
-            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
-            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
-        comp = "page"
-        page_write = "update"
-        content_type = kwargs.pop("content_type", "application/octet-stream")
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.upload_pages.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
-        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
-        if transactional_content_md5 is not None:
-            header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray')
-        if transactional_content_crc64 is not None:
-            header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
-        if range is not None:
-            header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _encryption_key is not None:
-            header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str')
-        if _encryption_key_sha256 is not None:
-            header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str')
-        if encryption_algorithm is not None:
-            header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _if_sequence_number_less_than_or_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long')
-        if _if_sequence_number_less_than is not None:
-            header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long')
-        if _if_sequence_number_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content_kwargs['stream_content'] = body
-        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+        _content = body
+
+        _request = build_upload_pages_request(
+            url=self._config.url,
+            content_length=content_length,
+            transactional_content_md5=transactional_content_md5,
+            transactional_content_crc64=transactional_content_crc64,
+            timeout=timeout,
+            range=range,
+            lease_id=_lease_id,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            encryption_scope=_encryption_scope,
+            if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to,
+            if_sequence_number_less_than=_if_sequence_number_less_than,
+            if_sequence_number_equal_to=_if_sequence_number_equal_to,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            structured_body_type=structured_body_type,
+            structured_content_length=structured_content_length,
+            comp=comp,
+            page_write=page_write,
+            content_type=content_type,
+            version=self._config.version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )
+        response_headers["x-ms-structured-body"] = self._deserialize(
+            "str", response.headers.get("x-ms-structured-body")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    upload_pages.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
-
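Continuing the sketch above, the rewritten upload_pages is normally reached through the public upload_page helper; the new structured_body_type/structured_content_length parameters are internal plumbing for structured-message validation and usually stay unset. A hedged fragment, reusing the blob client created earlier:

# Page writes must cover 512-byte-aligned ranges; transactional MD5/CRC64 remain optional.
data = b"\x00" * 512
await blob.upload_page(data, offset=0, length=512)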
-    async def clear_pages(
+    @distributed_trace_async
+    async def clear_pages(  # pylint: disable=inconsistent-return-statements
         self,
         content_length: int,
         timeout: Optional[int] = None,
         range: Optional[str] = None,
-        encryption_algorithm: Optional[str] = "AES256",
         request_id_parameter: Optional[str] = None,
-        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
-        cpk_info: Optional["_models.CpkInfo"] = None,
-        cpk_scope_info: Optional["_models.CpkScopeInfo"] = None,
-        sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None,
-        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
-        **kwargs
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
     ) -> None:
+        # pylint: disable=line-too-long
         """The Clear Pages operation clears a set of pages from a page blob.

-        :param content_length: The length of the request.
-        :type content_length: long
+        :param content_length: The length of the request. Required.
+        :type content_length: int
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param range: Return only the bytes of the blob in the specified range.
+        :param range: Return only the bytes of the blob in the specified range. Default value is None.
         :type range: str
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-         the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-         provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_info: Parameter group.
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param sequence_number_access_conditions: Parameter group.
-        :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param sequence_number_access_conditions: Parameter group. Default value is None.
+        :type sequence_number_access_conditions:
+         ~azure.storage.blob.models.SequenceNumberAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
+        page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _lease_id = None
         _encryption_key = None
         _encryption_key_sha256 = None
+        _encryption_algorithm = None
         _encryption_scope = None
         _if_sequence_number_less_than_or_equal_to = None
         _if_sequence_number_less_than = None
@@ -486,176 +531,174 @@ async def clear_pages(
         _if_match = None
         _if_none_match = None
         _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
         if cpk_info is not None:
+            _encryption_algorithm = cpk_info.encryption_algorithm
             _encryption_key = cpk_info.encryption_key
             _encryption_key_sha256 = cpk_info.encryption_key_sha256
         if cpk_scope_info is not None:
             _encryption_scope = cpk_scope_info.encryption_scope
-        if lease_access_conditions is not None:
-            _lease_id = lease_access_conditions.lease_id
+        if sequence_number_access_conditions is not None:
+            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
+            _if_sequence_number_less_than_or_equal_to = (
+                sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
+            )
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        if sequence_number_access_conditions is not None:
-            _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
-            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
-            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
-        comp = "page"
-        page_write = "clear"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.clear_pages.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
-        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
-        if range is not None:
-            header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _encryption_key is not None:
-            header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str')
-        if _encryption_key_sha256 is not None:
-            header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str')
-        if encryption_algorithm is not None:
-            header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _if_sequence_number_less_than_or_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long')
-        if _if_sequence_number_less_than is not None:
-            header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long')
-        if _if_sequence_number_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_clear_pages_request(
+            url=self._config.url,
+            content_length=content_length,
+            timeout=timeout,
+            range=range,
+            lease_id=_lease_id,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            encryption_scope=_encryption_scope,
+            if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to,
+            if_sequence_number_less_than=_if_sequence_number_less_than,
+            if_sequence_number_equal_to=_if_sequence_number_equal_to,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            comp=comp,
+            page_write=page_write,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    clear_pages.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
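Likewise for clear_pages, which the public clear_page helper wraps; a short fragment under the same assumptions as the sketches above:

# Zeroes one 512-byte page; offset and length must be 512-byte aligned.
await blob.clear_page(offset=0, length=512)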
-    async def upload_pages_from_url(
+    @distributed_trace_async
+    async def upload_pages_from_url(  # pylint: disable=inconsistent-return-statements
         self,
         source_url: str,
         source_range: str,
         content_length: int,
         range: str,
-        source_content_md5: Optional[bytearray] = None,
-        source_contentcrc64: Optional[bytearray] = None,
+        source_content_md5: Optional[bytes] = None,
+        source_contentcrc64: Optional[bytes] = None,
         timeout: Optional[int] = None,
-        encryption_algorithm: Optional[str] = "AES256",
         request_id_parameter: Optional[str] = None,
-        cpk_info: Optional["_models.CpkInfo"] = None,
-        cpk_scope_info: Optional["_models.CpkScopeInfo"] = None,
-        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
-        sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None,
-        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
-        source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None,
-        **kwargs
+        copy_source_authorization: Optional[str] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+        **kwargs: Any
     ) -> None:
+        # pylint: disable=line-too-long
        """The Upload Pages operation writes a range of pages to a page blob where the contents are read
        from a URL.

-        :param source_url: Specify a URL to the copy source.
+        :param source_url: Specify a URL to the copy source. Required.
         :type source_url: str
         :param source_range: Bytes of source data in the specified range. The length of this range
-         should match the ContentLength header and x-ms-range/Range destination range header.
+         should match the ContentLength header and x-ms-range/Range destination range header. Required.
         :type source_range: str
-        :param content_length: The length of the request.
-        :type content_length: long
+        :param content_length: The length of the request. Required.
+        :type content_length: int
         :param range: The range of bytes to which the source range would be written. The range should
-         be 512 aligned and range-end is required.
+         be 512 aligned and range-end is required. Required.
         :type range: str
         :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read
-         from the copy source.
-        :type source_content_md5: bytearray
+         from the copy source. Default value is None.
+        :type source_content_md5: bytes
         :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be
-         read from the copy source.
-        :type source_contentcrc64: bytearray
+         read from the copy source. Default value is None.
+        :type source_contentcrc64: bytes
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-         the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-         provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param cpk_info: Parameter group.
+        :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
+         OAuth access token to copy source. Default value is None.
+        :type copy_source_authorization: str
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param sequence_number_access_conditions: Parameter group.
-        :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param sequence_number_access_conditions: Parameter group. Default value is None.
+        :type sequence_number_access_conditions:
+         ~azure.storage.blob.models.SequenceNumberAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :param source_modified_access_conditions: Parameter group.
-        :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :param source_modified_access_conditions: Parameter group. Default value is None.
+        :type source_modified_access_conditions:
+         ~azure.storage.blob.models.SourceModifiedAccessConditions
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
        }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
+        page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _encryption_key = None
         _encryption_key_sha256 = None
+        _encryption_algorithm = None
         _encryption_scope = None
         _lease_id = None
         _if_sequence_number_less_than_or_equal_to = None
@@ -671,164 +714,175 @@ async def upload_pages_from_url(
         _source_if_match = None
         _source_if_none_match = None
         if cpk_info is not None:
+            _encryption_algorithm = cpk_info.encryption_algorithm
             _encryption_key = cpk_info.encryption_key
             _encryption_key_sha256 = cpk_info.encryption_key_sha256
         if cpk_scope_info is not None:
             _encryption_scope = cpk_scope_info.encryption_scope
         if lease_access_conditions is not None:
             _lease_id = lease_access_conditions.lease_id
+        if sequence_number_access_conditions is not None:
+            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
+            _if_sequence_number_less_than_or_equal_to = (
+                sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
+            )
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        if sequence_number_access_conditions is not None:
-            _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
-            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
-            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
         if source_modified_access_conditions is not None:
-            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
-            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
             _source_if_match = source_modified_access_conditions.source_if_match
+            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
             _source_if_none_match = source_modified_access_conditions.source_if_none_match
-        comp = "page"
-        page_write = "update"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.upload_pages_from_url.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
-        header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str')
-        header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str')
-        if source_content_md5 is not None:
-            header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray')
-        if source_contentcrc64 is not None:
-            header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray')
-        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
-        header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _encryption_key is not None:
-            header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str')
-        if _encryption_key_sha256 is not None:
-            header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str')
-        if encryption_algorithm is not None:
-            header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _if_sequence_number_less_than_or_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long')
-        if _if_sequence_number_less_than is not None:
-            header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long')
-        if _if_sequence_number_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        if _source_if_modified_since is not None:
-            header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
-        if _source_if_unmodified_since is not None:
-            header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
-        if _source_if_match is not None:
-            header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
-        if _source_if_none_match is not None:
-            header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
+
+        _request = build_upload_pages_from_url_request(
+            url=self._config.url,
+            source_url=source_url,
+            source_range=source_range,
+            content_length=content_length,
+            range=range,
+            source_content_md5=source_content_md5,
+            source_contentcrc64=source_contentcrc64,
+            timeout=timeout,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            encryption_scope=_encryption_scope,
+            lease_id=_lease_id,
+            if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to,
+            if_sequence_number_less_than=_if_sequence_number_less_than,
+            if_sequence_number_equal_to=_if_sequence_number_equal_to,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            source_if_modified_since=_source_if_modified_since,
+            source_if_unmodified_since=_source_if_unmodified_since,
+            source_if_match=_source_if_match,
+            source_if_none_match=_source_if_none_match,
+            request_id_parameter=request_id_parameter,
+            copy_source_authorization=copy_source_authorization,
+            comp=comp,
+            page_write=page_write,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    upload_pages_from_url.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
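The new copy_source_authorization parameter threads an OAuth bearer token through to the copy source. On the public client this surfaces as the source_authorization keyword (name assumed from the non-vendored azure-storage-blob API); another fragment in the same sketch:

# source_url and the token are placeholders; the destination range must be 512-byte aligned.
await blob.upload_pages_from_url(
    source_url="https://<account>.blob.core.windows.net/source-container/source.bin",
    offset=0,
    length=512,
    source_offset=0,
    source_authorization="Bearer <access-token>",
)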
+    @distributed_trace_async
     async def get_page_ranges(
         self,
         snapshot: Optional[str] = None,
         timeout: Optional[int] = None,
         range: Optional[str] = None,
         request_id_parameter: Optional[str] = None,
-        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
-        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
-        **kwargs
-    ) -> "_models.PageList":
+        marker: Optional[str] = None,
+        maxresults: Optional[int] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> _models.PageList:
+        # pylint: disable=line-too-long
        """The Get Page Ranges operation returns the list of valid page ranges for a page blob or
        snapshot of a page blob.

         :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
         specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
-         see :code:`Creating a Snapshot of
-         a Blob.`.
+         see :code:`Creating
+         a Snapshot of a Blob.`. Default value is None.
         :type snapshot: str
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param range: Return only the bytes of the blob in the specified range.
+        :param range: Return only the bytes of the blob in the specified range. Default value is None.
         :type range: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param marker: A string value that identifies the portion of the list of containers to be
+         returned with the next listing operation. The operation returns the NextMarker value within the
+         response body if the listing operation did not return all containers remaining to be listed
+         with the current page. The NextMarker value can be used as the value for the marker parameter
+         in a subsequent call to request the next page of list items. The marker value is opaque to the
+         client. Default value is None.
+        :type marker: str
+        :param maxresults: Specifies the maximum number of containers to return. If the request does
+         not specify maxresults, or specifies a value greater than 5000, the server will return up to
+         5000 items. Note that if the listing operation crosses a partition boundary, then the service
+         will return a continuation token for retrieving the remainder of the results. For this reason,
+         it is possible that the service will return fewer results than specified by maxresults, or than
+         the default of 5000. Default value is None.
+        :type maxresults: int
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: PageList, or the result of cls(response)
+        :return: PageList or the result of cls(response)
         :rtype: ~azure.storage.blob.models.PageList
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PageList"]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist"))
+        cls: ClsType[_models.PageList] = kwargs.pop("cls", None)
+
         _lease_id = None
         _if_modified_since = None
         _if_unmodified_since = None
@@ -838,75 +892,66 @@ async def get_page_ranges(
         if lease_access_conditions is not None:
             _lease_id = lease_access_conditions.lease_id
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        comp = "pagelist"
-        accept = "application/xml"
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_get_page_ranges_request(
+            url=self._config.url,
+            snapshot=snapshot,
+            timeout=timeout,
+            range=range,
+            lease_id=_lease_id,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            marker=marker,
+            maxresults=maxresults,
+            comp=comp,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )

-        # Construct URL
-        url = self.get_page_ranges.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if snapshot is not None:
-            query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if range is not None:
-            header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        deserialized = self._deserialize('PageList', pipeline_response)
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["x-ms-blob-content-length"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-content-length")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+        deserialized = self._deserialize("PageList", pipeline_response.http_response)

         if cls:
-            return cls(pipeline_response, deserialized, response_headers)
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

-        return deserialized
-    get_page_ranges.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+        return deserialized  # type: ignore
get_page_ranges_diff( self, snapshot: Optional[str] = None, @@ -915,55 +960,81 @@ async def get_page_ranges_diff( prev_snapshot_url: Optional[str] = None, range: Optional[str] = None, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs - ) -> "_models.PageList": + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.PageList: + # pylint: disable=line-too-long """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were changed between target blob and previous snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a DateTime value that specifies that the response will contain only pages that were changed between target blob and previous snapshot. Changed pages include both updated and cleared pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is the older of the two. Note that incremental snapshots are currently supported only for blobs - created on or after January 1, 2016. + created on or after January 1, 2016. Default value is None. :type prevsnapshot: str :param prev_snapshot_url: Optional. This header is only supported in service versions 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The response will only contain pages that were changed between the target blob and its previous - snapshot. + snapshot. Default value is None. :type prev_snapshot_url: str - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param marker: A string value that identifies the portion of the list of page ranges to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all page ranges remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of page ranges to return.
If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PageList, or the result of cls(response) + :return: PageList or the result of cls(response) :rtype: ~azure.storage.blob.models.PageList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -973,257 +1044,249 @@ async def get_page_ranges_diff( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "pagelist" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_page_ranges_diff_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + prevsnapshot=prevsnapshot, + prev_snapshot_url=prev_snapshot_url, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_page_ranges_diff.metadata['url'] # type: ignore - 
path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if prevsnapshot is not None: - query_parameters['prevsnapshot'] = self._serialize.query("prevsnapshot", prevsnapshot, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if prev_snapshot_url is not None: - header_parameters['x-ms-previous-snapshot-url'] = self._serialize.header("prev_snapshot_url", prev_snapshot_url, 'str') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('PageList', pipeline_response) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_page_ranges_diff.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - async def resize( + @distributed_trace_async + async def resize( # pylint: disable=inconsistent-return-statements self, blob_content_length: int, timeout: Optional[int] = None, - encryption_algorithm: Optional[str] = "AES256", request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - cpk_info: Optional["_models.CpkInfo"] = None, - cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Resize the Blob. :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. - :type blob_content_length: long + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :type blob_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. 
+ :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_resize_request( + url=self._config.url, + blob_content_length=blob_content_length, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.resize.metadata['url'] # type: ignore - path_format_arguments = { - 'url': 
self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - resize.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - async def update_sequence_number( + @distributed_trace_async + async def update_sequence_number( # pylint: disable=inconsistent-return-statements self, - sequence_number_action: Union[str, "_models.SequenceNumberActionType"], + sequence_number_action: Union[str, _models.SequenceNumberActionType], timeout: Optional[int] = None, - blob_sequence_number: Optional[int] = 0, + blob_sequence_number: int = 0, request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Update the sequence number of the blob. :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the request. This property applies to page blobs only. This property indicates how the service - should modify the blob's sequence number. + should modify the blob's sequence number. Known values are: "max", "update", and "increment". + Required. :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. - :type blob_sequence_number: long + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
+ :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -1233,80 +1296,69 @@ async def update_sequence_number( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_update_sequence_number_request( + url=self._config.url, + sequence_number_action=sequence_number_action, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.update_sequence_number.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 
'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-sequence-number-action'] = self._serialize.header("sequence_number_action", sequence_number_action, 'str') - if blob_sequence_number is not None: - header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - update_sequence_number.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - async def copy_incremental( + @distributed_trace_async + async def copy_incremental( # pylint: 
disable=inconsistent-return-statements self, copy_source: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, - **kwargs + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between the previously copied snapshot and the snapshot being copied are transferred to the destination. The copied snapshots are complete copies of @@ -1316,93 +1368,89 @@ async def copy_incremental( :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None.
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "incrementalcopy" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_copy_incremental_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.copy_incremental.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) if cls: - return cls(pipeline_response, None, response_headers) - - copy_incremental.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_patch.py new file mode 100644 index 000000000000..71dde502c70f --- /dev/null +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_patch.py @@ -0,0 +1,26 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + + +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + + from typing import List +__all__ = [] # type: List[str] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_service_operations.py index 91a064680562..0f0e61805e05 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_service_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/aio/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,593 +6,648 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, AsyncIterator, Callable, Dict, IO, List, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... 
import models as _models - -T = TypeVar('T') +from ...operations._service_operations import ( + build_filter_blobs_request, + build_get_account_info_request, + build_get_properties_request, + build_get_statistics_request, + build_get_user_delegation_key_request, + build_list_containers_segment_request, + build_set_properties_request, + build_submit_batch_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ServiceOperations: - """ServiceOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`service` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def set_properties( + @distributed_trace_async + async def set_properties( # pylint: disable=inconsistent-return-statements self, - storage_service_properties: "_models.StorageServiceProperties", + storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs + **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Sets properties for a storage account's Blob service endpoint, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - :param storage_service_properties: The StorageService properties. + :param storage_service_properties: The StorageService properties. Required. :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True) + + _request = build_set_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, 
**kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) - - set_properties.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def get_properties( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.StorageServiceProperties": + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceProperties: + # pylint: disable=line-too-long """gets the properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceProperties, or the result of cls(response) + :return: StorageServiceProperties or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceProperties - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) + + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_properties.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def get_statistics( - self, - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.StorageServiceStats": + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceStats: + # pylint: disable=line-too-long """Retrieves statistics related to replication for the Blob service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the storage account. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
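A minimal usage sketch for the regenerated async get_properties, under the same client.service assumption as above:

    props = await client.service.get_properties(timeout=30)
    # StorageServiceProperties carries the analytics and CORS configuration.
    if props.logging:
        print("analytics logging version:", props.logging.version)
    for rule in props.cors or []:
        print("CORS allowed origins:", rule.allowed_origins)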
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceStats, or the result of cls(response) + :return: StorageServiceStats or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceStats - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "stats" - accept = "application/xml" - - # Construct URL - url = self.get_statistics.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) + cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) + + _request = build_get_statistics_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('StorageServiceStats', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_statistics.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def list_containers_segment( self, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - include: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] = None, + include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.ListContainersSegmentResponse": + **kwargs: Any + ) -> _models.ListContainersSegmentResponse: + # pylint: disable=line-too-long """The List Containers Segment operation returns a list of the containers under the specified account. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify that the container's metadata be returned as - part of the response body. + part of the response body. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
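As the docstring says, get_statistics is only served from the secondary location endpoint when read-access geo-redundant replication is enabled; a sketch under that assumption:

    stats = await client.service.get_statistics()
    if stats.geo_replication:
        # status is a GeoReplicationStatusType: "live", "bootstrap", or "unavailable".
        print(stats.geo_replication.status, stats.geo_replication.last_sync_time)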
:type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListContainersSegmentResponse, or the result of cls(response) + :return: ListContainersSegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_containers_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_containers_segment_request( + url=self._config.url, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = 
False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_containers_segment.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def get_user_delegation_key( self, - key_info: "_models.KeyInfo", + key_info: _models.KeyInfo, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> "_models.UserDelegationKey": + **kwargs: Any + ) -> _models.UserDelegationKey: + # pylint: disable=line-too-long """Retrieves a user delegation key for the Blob service. This is only a valid operation when using bearer token authentication. - :param key_info: + :param key_info: Key information. Required. :type key_info: ~azure.storage.blob.models.KeyInfo :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
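A sketch of driving list_containers_segment by hand with the next_marker continuation token (the higher-level container client normally wraps this in a pager):

    marker = None
    while True:
        segment = await client.service.list_containers_segment(
            prefix="eventhub", marker=marker, maxresults=100
        )
        for container in segment.container_items:
            print(container.name)
        marker = segment.next_marker
        if not marker:  # an empty or absent marker means the listing is complete
            break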
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: UserDelegationKey, or the result of cls(response) + :return: UserDelegationKey or the result of cls(response) :rtype: ~azure.storage.blob.models.UserDelegationKey - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "userdelegationkey" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.get_user_delegation_key.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) + + _content = self._serialize.body(key_info, "KeyInfo", is_xml=True) + + _request = build_get_user_delegation_key_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('UserDelegationKey', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("UserDelegationKey", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_user_delegation_key.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore - async def get_account_info( - self, - **kwargs + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
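A sketch of get_user_delegation_key, which is only valid under bearer-token (AAD) authentication; KeyInfo takes ISO-8601 strings, and the datetime formatting below is illustrative:

    from datetime import datetime, timedelta, timezone

    start = datetime.now(timezone.utc)
    key_info = _models.KeyInfo(
        start=start.strftime("%Y-%m-%dT%H:%M:%SZ"),
        expiry=(start + timedelta(hours=1)).strftime("%Y-%m-%dT%H:%M:%SZ"),
    )
    delegation_key = await client.service.get_user_delegation_key(key_info)
    # The returned UserDelegationKey is what user-delegation SAS signing consumes.
    print(delegation_key.signed_oid, delegation_key.signed_expiry)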
+ :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) - 
response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', response.headers.get('x-ms-is-hns-enabled')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace_async async def submit_batch( self, content_length: int, - multipart_content_type: str, - body: IO, + body: IO[bytes], timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, - **kwargs - ) -> IO: + **kwargs: Any + ) -> AsyncIterator[bytes]: + # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. - :param content_length: The length of the request. - :type content_length: long - :param multipart_content_type: Required. The value of this header must be multipart/mixed with - a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. - :type multipart_content_type: str - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
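get_account_info returns None and reports everything through response headers, so the cls hook is the natural way to capture them; the callback signature matches the cls(pipeline_response, None, response_headers) call shown above:

    def _pick_headers(pipeline_response, deserialized, headers):
        # deserialized is None for this operation; the headers dict holds the data.
        return headers["x-ms-sku-name"], headers["x-ms-account-kind"]

    sku, kind = await client.service.get_account_info(cls=_pick_headers)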
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: AsyncIterator[bytes] or the result of cls(response) + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "batch" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.submit_batch.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(body, 'IO', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - submit_batch.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace_async async def filter_blobs( self, timeout: Optional[int] = None, @@ -599,93 +655,99 @@ async def filter_blobs( where: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, - **kwargs - ) -> "_models.FilterBlobSegment": + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be scoped within the expression to a single container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param where: Filters the results to return only to return only blobs whose tags match the - specified expression. + specified expression. Default value is None. :type where: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. 
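The regenerated submit_batch above streams the multipart response back as AsyncIterator[bytes], and multipart_content_type moved from a required positional parameter to a keyword with a default; a sketch with payload construction elided:

    import io

    batch_body = b"..."  # a pre-encoded multipart/mixed payload, built elsewhere
    stream = await client.service.submit_batch(
        content_length=len(batch_body),
        body=io.BytesIO(batch_body),
        multipart_content_type="multipart/mixed; boundary=batch_example",
    )
    raw = bytearray()
    async for chunk in stream:
        raw.extend(chunk)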
+ client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: FilterBlobSegment, or the result of cls(response) + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) :rtype: ~azure.storage.blob.models.FilterBlobSegment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "blobs" - accept = "application/xml" - - # Construct URL - url = self.filter_blobs.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if where is not None: - query_parameters['where'] = self._serialize.query("where", where, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + 
request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - filter_blobs.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/__init__.py index 9c98989e6847..63ca7e23fc24 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/__init__.py @@ -6,218 +6,168 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
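Before the models changes below, a sketch of the tag-filter surface that the filter_blobs hunk above exposes; the where parameter uses the service's tag expression grammar:

    segment = await client.service.filter_blobs(
        where="\"Status\" = 'processed'", maxresults=50
    )
    for item in segment.blobs:
        print(item.name, item.container_name)
    # segment.next_marker continues the listing, exactly as with containers.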
# -------------------------------------------------------------------------- -try: - from ._models_py3 import AccessPolicy - from ._models_py3 import AppendPositionAccessConditions - from ._models_py3 import ArrowConfiguration - from ._models_py3 import ArrowField - from ._models_py3 import BlobFlatListSegment - from ._models_py3 import BlobHTTPHeaders - from ._models_py3 import BlobHierarchyListSegment - from ._models_py3 import BlobItemInternal - from ._models_py3 import BlobMetadata - from ._models_py3 import BlobPrefix - from ._models_py3 import BlobPropertiesInternal - from ._models_py3 import BlobTag - from ._models_py3 import BlobTags - from ._models_py3 import Block - from ._models_py3 import BlockList - from ._models_py3 import BlockLookupList - from ._models_py3 import ClearRange - from ._models_py3 import ContainerCpkScopeInfo - from ._models_py3 import ContainerItem - from ._models_py3 import ContainerProperties - from ._models_py3 import CorsRule - from ._models_py3 import CpkInfo - from ._models_py3 import CpkScopeInfo - from ._models_py3 import DataLakeStorageError - from ._models_py3 import DataLakeStorageErrorDetails - from ._models_py3 import DelimitedTextConfiguration - from ._models_py3 import DirectoryHttpHeaders - from ._models_py3 import FilterBlobItem - from ._models_py3 import FilterBlobSegment - from ._models_py3 import GeoReplication - from ._models_py3 import JsonTextConfiguration - from ._models_py3 import KeyInfo - from ._models_py3 import LeaseAccessConditions - from ._models_py3 import ListBlobsFlatSegmentResponse - from ._models_py3 import ListBlobsHierarchySegmentResponse - from ._models_py3 import ListContainersSegmentResponse - from ._models_py3 import Logging - from ._models_py3 import Metrics - from ._models_py3 import ModifiedAccessConditions - from ._models_py3 import PageList - from ._models_py3 import PageRange - from ._models_py3 import QueryFormat - from ._models_py3 import QueryRequest - from ._models_py3 import QuerySerialization - from ._models_py3 import RetentionPolicy - from ._models_py3 import SequenceNumberAccessConditions - from ._models_py3 import SignedIdentifier - from ._models_py3 import SourceModifiedAccessConditions - from ._models_py3 import StaticWebsite - from ._models_py3 import StorageError - from ._models_py3 import StorageServiceProperties - from ._models_py3 import StorageServiceStats - from ._models_py3 import UserDelegationKey -except (SyntaxError, ImportError): - from ._models import AccessPolicy # type: ignore - from ._models import AppendPositionAccessConditions # type: ignore - from ._models import ArrowConfiguration # type: ignore - from ._models import ArrowField # type: ignore - from ._models import BlobFlatListSegment # type: ignore - from ._models import BlobHTTPHeaders # type: ignore - from ._models import BlobHierarchyListSegment # type: ignore - from ._models import BlobItemInternal # type: ignore - from ._models import BlobMetadata # type: ignore - from ._models import BlobPrefix # type: ignore - from ._models import BlobPropertiesInternal # type: ignore - from ._models import BlobTag # type: ignore - from ._models import BlobTags # type: ignore - from ._models import Block # type: ignore - from ._models import BlockList # type: ignore - from ._models import BlockLookupList # type: ignore - from ._models import ClearRange # type: ignore - from ._models import ContainerCpkScopeInfo # type: ignore - from ._models import ContainerItem # type: ignore - from ._models import ContainerProperties # type: ignore - 
from ._models import CorsRule # type: ignore - from ._models import CpkInfo # type: ignore - from ._models import CpkScopeInfo # type: ignore - from ._models import DataLakeStorageError # type: ignore - from ._models import DataLakeStorageErrorDetails # type: ignore - from ._models import DelimitedTextConfiguration # type: ignore - from ._models import DirectoryHttpHeaders # type: ignore - from ._models import FilterBlobItem # type: ignore - from ._models import FilterBlobSegment # type: ignore - from ._models import GeoReplication # type: ignore - from ._models import JsonTextConfiguration # type: ignore - from ._models import KeyInfo # type: ignore - from ._models import LeaseAccessConditions # type: ignore - from ._models import ListBlobsFlatSegmentResponse # type: ignore - from ._models import ListBlobsHierarchySegmentResponse # type: ignore - from ._models import ListContainersSegmentResponse # type: ignore - from ._models import Logging # type: ignore - from ._models import Metrics # type: ignore - from ._models import ModifiedAccessConditions # type: ignore - from ._models import PageList # type: ignore - from ._models import PageRange # type: ignore - from ._models import QueryFormat # type: ignore - from ._models import QueryRequest # type: ignore - from ._models import QuerySerialization # type: ignore - from ._models import RetentionPolicy # type: ignore - from ._models import SequenceNumberAccessConditions # type: ignore - from ._models import SignedIdentifier # type: ignore - from ._models import SourceModifiedAccessConditions # type: ignore - from ._models import StaticWebsite # type: ignore - from ._models import StorageError # type: ignore - from ._models import StorageServiceProperties # type: ignore - from ._models import StorageServiceStats # type: ignore - from ._models import UserDelegationKey # type: ignore +from ._models_py3 import AccessPolicy +from ._models_py3 import AppendPositionAccessConditions +from ._models_py3 import ArrowConfiguration +from ._models_py3 import ArrowField +from ._models_py3 import BlobFlatListSegment +from ._models_py3 import BlobHTTPHeaders +from ._models_py3 import BlobHierarchyListSegment +from ._models_py3 import BlobItemInternal +from ._models_py3 import BlobMetadata +from ._models_py3 import BlobName +from ._models_py3 import BlobPrefix +from ._models_py3 import BlobPropertiesInternal +from ._models_py3 import BlobTag +from ._models_py3 import BlobTags +from ._models_py3 import Block +from ._models_py3 import BlockList +from ._models_py3 import BlockLookupList +from ._models_py3 import ClearRange +from ._models_py3 import ContainerCpkScopeInfo +from ._models_py3 import ContainerItem +from ._models_py3 import ContainerProperties +from ._models_py3 import CorsRule +from ._models_py3 import CpkInfo +from ._models_py3 import CpkScopeInfo +from ._models_py3 import DelimitedTextConfiguration +from ._models_py3 import FilterBlobItem +from ._models_py3 import FilterBlobSegment +from ._models_py3 import GeoReplication +from ._models_py3 import JsonTextConfiguration +from ._models_py3 import KeyInfo +from ._models_py3 import LeaseAccessConditions +from ._models_py3 import ListBlobsFlatSegmentResponse +from ._models_py3 import ListBlobsHierarchySegmentResponse +from ._models_py3 import ListContainersSegmentResponse +from ._models_py3 import Logging +from ._models_py3 import Metrics +from ._models_py3 import ModifiedAccessConditions +from ._models_py3 import PageList +from ._models_py3 import PageRange +from ._models_py3 import QueryFormat +from 
._models_py3 import QueryRequest +from ._models_py3 import QuerySerialization +from ._models_py3 import RetentionPolicy +from ._models_py3 import SequenceNumberAccessConditions +from ._models_py3 import SignedIdentifier +from ._models_py3 import SourceModifiedAccessConditions +from ._models_py3 import StaticWebsite +from ._models_py3 import StorageError +from ._models_py3 import StorageServiceProperties +from ._models_py3 import StorageServiceStats +from ._models_py3 import UserDelegationKey -from ._azure_blob_storage_enums import ( - AccessTier, - AccessTierOptional, - AccessTierRequired, - AccountKind, - ArchiveStatus, - BlobExpiryOptions, - BlobType, - BlockListType, - CopyStatusType, - DeleteSnapshotsOptionType, - GeoReplicationStatusType, - LeaseDurationType, - LeaseStateType, - LeaseStatusType, - ListBlobsIncludeItem, - ListContainersIncludeType, - PathRenameMode, - PremiumPageBlobAccessTier, - PublicAccessType, - QueryFormatType, - RehydratePriority, - SequenceNumberActionType, - SkuName, - StorageErrorCode, -) +from ._azure_blob_storage_enums import AccessTier +from ._azure_blob_storage_enums import AccessTierOptional +from ._azure_blob_storage_enums import AccessTierRequired +from ._azure_blob_storage_enums import AccountKind +from ._azure_blob_storage_enums import ArchiveStatus +from ._azure_blob_storage_enums import BlobCopySourceTags +from ._azure_blob_storage_enums import BlobExpiryOptions +from ._azure_blob_storage_enums import BlobImmutabilityPolicyMode +from ._azure_blob_storage_enums import BlobType +from ._azure_blob_storage_enums import BlockListType +from ._azure_blob_storage_enums import CopyStatusType +from ._azure_blob_storage_enums import DeleteSnapshotsOptionType +from ._azure_blob_storage_enums import EncryptionAlgorithmType +from ._azure_blob_storage_enums import FilterBlobsIncludeItem +from ._azure_blob_storage_enums import GeoReplicationStatusType +from ._azure_blob_storage_enums import LeaseDurationType +from ._azure_blob_storage_enums import LeaseStateType +from ._azure_blob_storage_enums import LeaseStatusType +from ._azure_blob_storage_enums import ListBlobsIncludeItem +from ._azure_blob_storage_enums import ListContainersIncludeType +from ._azure_blob_storage_enums import PremiumPageBlobAccessTier +from ._azure_blob_storage_enums import PublicAccessType +from ._azure_blob_storage_enums import QueryFormatType +from ._azure_blob_storage_enums import RehydratePriority +from ._azure_blob_storage_enums import SequenceNumberActionType +from ._azure_blob_storage_enums import SkuName +from ._azure_blob_storage_enums import StorageErrorCode +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'AccessPolicy', - 'AppendPositionAccessConditions', - 'ArrowConfiguration', - 'ArrowField', - 'BlobFlatListSegment', - 'BlobHTTPHeaders', - 'BlobHierarchyListSegment', - 'BlobItemInternal', - 'BlobMetadata', - 'BlobPrefix', - 'BlobPropertiesInternal', - 'BlobTag', - 'BlobTags', - 'Block', - 'BlockList', - 'BlockLookupList', - 'ClearRange', - 'ContainerCpkScopeInfo', - 'ContainerItem', - 'ContainerProperties', - 'CorsRule', - 'CpkInfo', - 'CpkScopeInfo', - 'DataLakeStorageError', - 'DataLakeStorageErrorDetails', - 'DelimitedTextConfiguration', - 'DirectoryHttpHeaders', - 'FilterBlobItem', - 'FilterBlobSegment', - 'GeoReplication', - 'JsonTextConfiguration', - 'KeyInfo', - 'LeaseAccessConditions', - 'ListBlobsFlatSegmentResponse', - 
'ListBlobsHierarchySegmentResponse', - 'ListContainersSegmentResponse', - 'Logging', - 'Metrics', - 'ModifiedAccessConditions', - 'PageList', - 'PageRange', - 'QueryFormat', - 'QueryRequest', - 'QuerySerialization', - 'RetentionPolicy', - 'SequenceNumberAccessConditions', - 'SignedIdentifier', - 'SourceModifiedAccessConditions', - 'StaticWebsite', - 'StorageError', - 'StorageServiceProperties', - 'StorageServiceStats', - 'UserDelegationKey', - 'AccessTier', - 'AccessTierOptional', - 'AccessTierRequired', - 'AccountKind', - 'ArchiveStatus', - 'BlobExpiryOptions', - 'BlobType', - 'BlockListType', - 'CopyStatusType', - 'DeleteSnapshotsOptionType', - 'GeoReplicationStatusType', - 'LeaseDurationType', - 'LeaseStateType', - 'LeaseStatusType', - 'ListBlobsIncludeItem', - 'ListContainersIncludeType', - 'PathRenameMode', - 'PremiumPageBlobAccessTier', - 'PublicAccessType', - 'QueryFormatType', - 'RehydratePriority', - 'SequenceNumberActionType', - 'SkuName', - 'StorageErrorCode', + "AccessPolicy", + "AppendPositionAccessConditions", + "ArrowConfiguration", + "ArrowField", + "BlobFlatListSegment", + "BlobHTTPHeaders", + "BlobHierarchyListSegment", + "BlobItemInternal", + "BlobMetadata", + "BlobName", + "BlobPrefix", + "BlobPropertiesInternal", + "BlobTag", + "BlobTags", + "Block", + "BlockList", + "BlockLookupList", + "ClearRange", + "ContainerCpkScopeInfo", + "ContainerItem", + "ContainerProperties", + "CorsRule", + "CpkInfo", + "CpkScopeInfo", + "DelimitedTextConfiguration", + "FilterBlobItem", + "FilterBlobSegment", + "GeoReplication", + "JsonTextConfiguration", + "KeyInfo", + "LeaseAccessConditions", + "ListBlobsFlatSegmentResponse", + "ListBlobsHierarchySegmentResponse", + "ListContainersSegmentResponse", + "Logging", + "Metrics", + "ModifiedAccessConditions", + "PageList", + "PageRange", + "QueryFormat", + "QueryRequest", + "QuerySerialization", + "RetentionPolicy", + "SequenceNumberAccessConditions", + "SignedIdentifier", + "SourceModifiedAccessConditions", + "StaticWebsite", + "StorageError", + "StorageServiceProperties", + "StorageServiceStats", + "UserDelegationKey", + "AccessTier", + "AccessTierOptional", + "AccessTierRequired", + "AccountKind", + "ArchiveStatus", + "BlobCopySourceTags", + "BlobExpiryOptions", + "BlobImmutabilityPolicyMode", + "BlobType", + "BlockListType", + "CopyStatusType", + "DeleteSnapshotsOptionType", + "EncryptionAlgorithmType", + "FilterBlobsIncludeItem", + "GeoReplicationStatusType", + "LeaseDurationType", + "LeaseStateType", + "LeaseStatusType", + "ListBlobsIncludeItem", + "ListContainersIncludeType", + "PremiumPageBlobAccessTier", + "PublicAccessType", + "QueryFormatType", + "RehydratePriority", + "SequenceNumberActionType", + "SkuName", + "StorageErrorCode", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py index 2df7b1ad6219..12ccbf7312f9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_azure_blob_storage_enums.py @@ -6,27 +6,12 @@ # 
Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum, EnumMeta -from six import with_metaclass +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta -class _CaseInsensitiveEnumMeta(EnumMeta): - def __getitem__(self, name): - return super().__getitem__(name.upper()) - def __getattr__(cls, name): - """Return the enum member matching `name` - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - """ - try: - return cls._member_map_[name.upper()] - except KeyError: - raise AttributeError(name) - - -class AccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccessTier.""" P4 = "P4" P6 = "P6" @@ -42,8 +27,12 @@ class AccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HOT = "Hot" COOL = "Cool" ARCHIVE = "Archive" + PREMIUM = "Premium" + COLD = "Cold" + -class AccessTierOptional(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AccessTierOptional(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccessTierOptional.""" P4 = "P4" P6 = "P6" @@ -59,8 +48,11 @@ class AccessTierOptional(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HOT = "Hot" COOL = "Cool" ARCHIVE = "Archive" + COLD = "Cold" -class AccessTierRequired(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class AccessTierRequired(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccessTierRequired.""" P4 = "P4" P6 = "P6" @@ -76,8 +68,11 @@ class AccessTierRequired(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HOT = "Hot" COOL = "Cool" ARCHIVE = "Archive" + COLD = "Cold" + -class AccountKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class AccountKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AccountKind.""" STORAGE = "Storage" BLOB_STORAGE = "BlobStorage" @@ -85,56 +80,102 @@ class AccountKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FILE_STORAGE = "FileStorage" BLOCK_BLOB_STORAGE = "BlockBlobStorage" -class ArchiveStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class ArchiveStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ArchiveStatus.""" REHYDRATE_PENDING_TO_HOT = "rehydrate-pending-to-hot" REHYDRATE_PENDING_TO_COOL = "rehydrate-pending-to-cool" + REHYDRATE_PENDING_TO_COLD = "rehydrate-pending-to-cold" + + +class BlobCopySourceTags(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobCopySourceTags.""" + + REPLACE = "REPLACE" + COPY = "COPY" + -class BlobExpiryOptions(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class BlobExpiryOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobExpiryOptions.""" NEVER_EXPIRE = "NeverExpire" RELATIVE_TO_CREATION = "RelativeToCreation" RELATIVE_TO_NOW = "RelativeToNow" ABSOLUTE = "Absolute" -class BlobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobImmutabilityPolicyMode.""" + + MUTABLE = "Mutable" + UNLOCKED = "Unlocked" + LOCKED = "Locked" + + +class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlobType.""" BLOCK_BLOB = "BlockBlob" PAGE_BLOB = "PageBlob" APPEND_BLOB = "AppendBlob" -class BlockListType(with_metaclass(_CaseInsensitiveEnumMeta, str, 
Enum)): + +class BlockListType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """BlockListType.""" COMMITTED = "committed" UNCOMMITTED = "uncommitted" ALL = "all" -class CopyStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class CopyStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CopyStatusType.""" PENDING = "pending" SUCCESS = "success" ABORTED = "aborted" FAILED = "failed" -class DeleteSnapshotsOptionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class DeleteSnapshotsOptionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DeleteSnapshotsOptionType.""" INCLUDE = "include" ONLY = "only" -class GeoReplicationStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The status of the secondary location - """ + +class EncryptionAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """EncryptionAlgorithmType.""" + + NONE = "None" + AES256 = "AES256" + + +class FilterBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """FilterBlobsIncludeItem.""" + + NONE = "none" + VERSIONS = "versions" + + +class GeoReplicationStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of the secondary location.""" LIVE = "live" BOOTSTRAP = "bootstrap" UNAVAILABLE = "unavailable" -class LeaseDurationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class LeaseDurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LeaseDurationType.""" INFINITE = "infinite" FIXED = "fixed" -class LeaseStateType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class LeaseStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LeaseStateType.""" AVAILABLE = "available" LEASED = "leased" @@ -142,12 +183,16 @@ class LeaseStateType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): BREAKING = "breaking" BROKEN = "broken" -class LeaseStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class LeaseStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LeaseStatusType.""" LOCKED = "locked" UNLOCKED = "unlocked" -class ListBlobsIncludeItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class ListBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ListBlobsIncludeItem.""" COPY = "copy" DELETED = "deleted" @@ -156,18 +201,21 @@ class ListBlobsIncludeItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): UNCOMMITTEDBLOBS = "uncommittedblobs" VERSIONS = "versions" TAGS = "tags" + IMMUTABILITYPOLICY = "immutabilitypolicy" + LEGALHOLD = "legalhold" + DELETEDWITHVERSIONS = "deletedwithversions" -class ListContainersIncludeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class ListContainersIncludeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ListContainersIncludeType.""" METADATA = "metadata" DELETED = "deleted" + SYSTEM = "system" -class PathRenameMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - - LEGACY = "legacy" - POSIX = "posix" -class PremiumPageBlobAccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PremiumPageBlobAccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """PremiumPageBlobAccessTier.""" P4 = "P4" P6 = "P6" @@ -181,20 +229,24 @@ class PremiumPageBlobAccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, En P70 = "P70" P80 = "P80" -class PublicAccessType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class PublicAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """PublicAccessType.""" CONTAINER = "container" BLOB = "blob" -class 
QueryFormatType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The quick query format type. - """ + +class QueryFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The quick query format type.""" DELIMITED = "delimited" JSON = "json" ARROW = "arrow" + PARQUET = "parquet" -class RehydratePriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class RehydratePriority(str, Enum, metaclass=CaseInsensitiveEnumMeta): """If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. """ @@ -202,13 +254,17 @@ class RehydratePriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HIGH = "High" STANDARD = "Standard" -class SequenceNumberActionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class SequenceNumberActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SequenceNumberActionType.""" MAX = "max" UPDATE = "update" INCREMENT = "increment" -class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SkuName.""" STANDARD_LRS = "Standard_LRS" STANDARD_GRS = "Standard_GRS" @@ -216,9 +272,9 @@ class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STANDARD_ZRS = "Standard_ZRS" PREMIUM_LRS = "Premium_LRS" -class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Error codes returned by the service - """ + +class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Error codes returned by the service.""" ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" ACCOUNT_BEING_CREATED = "AccountBeingCreated" @@ -268,6 +324,7 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): BLOB_NOT_FOUND = "BlobNotFound" BLOB_OVERWRITTEN = "BlobOverwritten" BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" + BLOB_USES_CUSTOMER_SPECIFIED_ENCRYPTION = "BlobUsesCustomerSpecifiedEncryption" BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" BLOCK_LIST_TOO_LONG = "BlockListTooLong" CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" @@ -281,7 +338,7 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): COPY_ID_MISMATCH = "CopyIdMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" - INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEralierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" @@ -318,7 +375,7 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" - SNAPHOT_OPERATION_RATE_EXCEEDED = "SnaphotOperationRateExceeded" + SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" SNAPSHOTS_PRESENT = "SnapshotsPresent" SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" SYSTEM_IN_USE = "SystemInUse" @@ -332,3 +389,4 @@ class StorageErrorCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): AUTHORIZATION_PERMISSION_MISMATCH = "AuthorizationPermissionMismatch" 
AUTHORIZATION_SERVICE_MISMATCH = "AuthorizationServiceMismatch" AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch" + BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models.py deleted file mode 100644 index a92bc6b05f34..000000000000 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models.py +++ /dev/null @@ -1,2024 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.exceptions import HttpResponseError -import msrest.serialization - - -class AccessPolicy(msrest.serialization.Model): - """An Access policy. - - :param start: the date-time the policy is active. - :type start: str - :param expiry: the date-time the policy expires. - :type expiry: str - :param permission: the permissions for the acl policy. - :type permission: str - """ - - _attribute_map = { - 'start': {'key': 'Start', 'type': 'str'}, - 'expiry': {'key': 'Expiry', 'type': 'str'}, - 'permission': {'key': 'Permission', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AccessPolicy, self).__init__(**kwargs) - self.start = kwargs.get('start', None) - self.expiry = kwargs.get('expiry', None) - self.permission = kwargs.get('permission', None) - - -class AppendPositionAccessConditions(msrest.serialization.Model): - """Parameter group. - - :param max_size: Optional conditional header. The max length in bytes permitted for the append - blob. If the Append Block operation would cause the blob to exceed that limit or if the blob - size is already greater than the value specified in this header, the request will fail with - MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :type max_size: long - :param append_position: Optional conditional header, used only for the Append Block operation. - A number indicating the byte offset to compare. Append Block will succeed only if the append - position is equal to this number. If it is not, the request will fail with the - AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). - :type append_position: long - """ - - _attribute_map = { - 'max_size': {'key': 'maxSize', 'type': 'long'}, - 'append_position': {'key': 'appendPosition', 'type': 'long'}, - } - - def __init__( - self, - **kwargs - ): - super(AppendPositionAccessConditions, self).__init__(**kwargs) - self.max_size = kwargs.get('max_size', None) - self.append_position = kwargs.get('append_position', None) - - -class ArrowConfiguration(msrest.serialization.Model): - """arrow configuration. - - All required parameters must be populated in order to send to Azure. - - :param schema: Required. 
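The enum hunks above swap the vendored six.with_metaclass shim and the hand-rolled _CaseInsensitiveEnumMeta for azure.core's CaseInsensitiveEnumMeta, so the vendored package no longer needs six at all. A minimal sketch of the lookup behavior this migration preserves (assuming azure-core is installed; the enum is trimmed to two members for brevity):

    from enum import Enum
    from azure.core import CaseInsensitiveEnumMeta

    class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
        HOT = "Hot"
        COOL = "Cool"

    # Name lookups remain case-insensitive, matching the deleted metaclass:
    assert AccessTier["hot"] is AccessTier.HOT   # __getitem__ upper-cases the key
    assert AccessTier.cool is AccessTier.COOL    # __getattr__ retries via _member_map_
    # Members are still str subclasses, so raw-value comparisons keep working:
    assert AccessTier.HOT == "Hot"

Note that value construction, AccessTier("hot"), was not case-insensitive under either metaclass; only name access is, which is why the swap is behavior-preserving.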
- :type schema: list[~azure.storage.blob.models.ArrowField] - """ - - _validation = { - 'schema': {'required': True}, - } - - _attribute_map = { - 'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'wrapped': True, 'itemsName': 'Field'}}, - } - _xml_map = { - 'name': 'ArrowConfiguration' - } - - def __init__( - self, - **kwargs - ): - super(ArrowConfiguration, self).__init__(**kwargs) - self.schema = kwargs['schema'] - - -class ArrowField(msrest.serialization.Model): - """field of an arrow schema. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. - :type type: str - :param name: - :type name: str - :param precision: - :type precision: int - :param scale: - :type scale: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'Type', 'type': 'str'}, - 'name': {'key': 'Name', 'type': 'str'}, - 'precision': {'key': 'Precision', 'type': 'int'}, - 'scale': {'key': 'Scale', 'type': 'int'}, - } - _xml_map = { - 'name': 'Field' - } - - def __init__( - self, - **kwargs - ): - super(ArrowField, self).__init__(**kwargs) - self.type = kwargs['type'] - self.name = kwargs.get('name', None) - self.precision = kwargs.get('precision', None) - self.scale = kwargs.get('scale', None) - - -class BlobFlatListSegment(msrest.serialization.Model): - """BlobFlatListSegment. - - All required parameters must be populated in order to send to Azure. - - :param blob_items: Required. - :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] - """ - - _validation = { - 'blob_items': {'required': True}, - } - - _attribute_map = { - 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]'}, - } - _xml_map = { - 'name': 'Blobs' - } - - def __init__( - self, - **kwargs - ): - super(BlobFlatListSegment, self).__init__(**kwargs) - self.blob_items = kwargs['blob_items'] - - -class BlobHierarchyListSegment(msrest.serialization.Model): - """BlobHierarchyListSegment. - - All required parameters must be populated in order to send to Azure. - - :param blob_prefixes: - :type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] - :param blob_items: Required. - :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] - """ - - _validation = { - 'blob_items': {'required': True}, - } - - _attribute_map = { - 'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix'}}, - 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}}, - } - _xml_map = { - 'name': 'Blobs' - } - - def __init__( - self, - **kwargs - ): - super(BlobHierarchyListSegment, self).__init__(**kwargs) - self.blob_prefixes = kwargs.get('blob_prefixes', None) - self.blob_items = kwargs['blob_items'] - - -class BlobHTTPHeaders(msrest.serialization.Model): - """Parameter group. - - :param blob_cache_control: Optional. Sets the blob's cache control. If specified, this property - is stored with the blob and returned with a read request. - :type blob_cache_control: str - :param blob_content_type: Optional. Sets the blob's content type. If specified, this property - is stored with the blob and returned with a read request. - :type blob_content_type: str - :param blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not - validated, as the hashes for the individual blocks were validated when each was uploaded. - :type blob_content_md5: bytearray - :param blob_content_encoding: Optional. 
Sets the blob's content encoding. If specified, this - property is stored with the blob and returned with a read request. - :type blob_content_encoding: str - :param blob_content_language: Optional. Set the blob's content language. If specified, this - property is stored with the blob and returned with a read request. - :type blob_content_language: str - :param blob_content_disposition: Optional. Sets the blob's Content-Disposition header. - :type blob_content_disposition: str - """ - - _attribute_map = { - 'blob_cache_control': {'key': 'blobCacheControl', 'type': 'str'}, - 'blob_content_type': {'key': 'blobContentType', 'type': 'str'}, - 'blob_content_md5': {'key': 'blobContentMD5', 'type': 'bytearray'}, - 'blob_content_encoding': {'key': 'blobContentEncoding', 'type': 'str'}, - 'blob_content_language': {'key': 'blobContentLanguage', 'type': 'str'}, - 'blob_content_disposition': {'key': 'blobContentDisposition', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobHTTPHeaders, self).__init__(**kwargs) - self.blob_cache_control = kwargs.get('blob_cache_control', None) - self.blob_content_type = kwargs.get('blob_content_type', None) - self.blob_content_md5 = kwargs.get('blob_content_md5', None) - self.blob_content_encoding = kwargs.get('blob_content_encoding', None) - self.blob_content_language = kwargs.get('blob_content_language', None) - self.blob_content_disposition = kwargs.get('blob_content_disposition', None) - - -class BlobItemInternal(msrest.serialization.Model): - """An Azure Storage blob. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param deleted: Required. - :type deleted: bool - :param snapshot: Required. - :type snapshot: str - :param version_id: - :type version_id: str - :param is_current_version: - :type is_current_version: bool - :param properties: Required. Properties of a blob. - :type properties: ~azure.storage.blob.models.BlobPropertiesInternal - :param metadata: - :type metadata: ~azure.storage.blob.models.BlobMetadata - :param blob_tags: Blob tags. - :type blob_tags: ~azure.storage.blob.models.BlobTags - :param object_replication_metadata: Dictionary of :code:``. 
- :type object_replication_metadata: dict[str, str] - """ - - _validation = { - 'name': {'required': True}, - 'deleted': {'required': True}, - 'snapshot': {'required': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'deleted': {'key': 'Deleted', 'type': 'bool'}, - 'snapshot': {'key': 'Snapshot', 'type': 'str'}, - 'version_id': {'key': 'VersionId', 'type': 'str'}, - 'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool'}, - 'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal'}, - 'metadata': {'key': 'Metadata', 'type': 'BlobMetadata'}, - 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags'}, - 'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}'}, - } - _xml_map = { - 'name': 'Blob' - } - - def __init__( - self, - **kwargs - ): - super(BlobItemInternal, self).__init__(**kwargs) - self.name = kwargs['name'] - self.deleted = kwargs['deleted'] - self.snapshot = kwargs['snapshot'] - self.version_id = kwargs.get('version_id', None) - self.is_current_version = kwargs.get('is_current_version', None) - self.properties = kwargs['properties'] - self.metadata = kwargs.get('metadata', None) - self.blob_tags = kwargs.get('blob_tags', None) - self.object_replication_metadata = kwargs.get('object_replication_metadata', None) - - -class BlobMetadata(msrest.serialization.Model): - """BlobMetadata. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, str] - :param encrypted: - :type encrypted: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{str}'}, - 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'attr': True}}, - } - _xml_map = { - 'name': 'Metadata' - } - - def __init__( - self, - **kwargs - ): - super(BlobMetadata, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.encrypted = kwargs.get('encrypted', None) - - -class BlobPrefix(msrest.serialization.Model): - """BlobPrefix. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - """ - - _validation = { - 'name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(BlobPrefix, self).__init__(**kwargs) - self.name = kwargs['name'] - - -class BlobPropertiesInternal(msrest.serialization.Model): - """Properties of a blob. - - All required parameters must be populated in order to send to Azure. - - :param creation_time: - :type creation_time: ~datetime.datetime - :param last_modified: Required. - :type last_modified: ~datetime.datetime - :param etag: Required. - :type etag: str - :param content_length: Size in bytes. - :type content_length: long - :param content_type: - :type content_type: str - :param content_encoding: - :type content_encoding: str - :param content_language: - :type content_language: str - :param content_md5: - :type content_md5: bytearray - :param content_disposition: - :type content_disposition: str - :param cache_control: - :type cache_control: str - :param blob_sequence_number: - :type blob_sequence_number: long - :param blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". - :type blob_type: str or ~azure.storage.blob.models.BlobType - :param lease_status: Possible values include: "locked", "unlocked". 
- :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :param lease_state: Possible values include: "available", "leased", "expired", "breaking", - "broken". - :type lease_state: str or ~azure.storage.blob.models.LeaseStateType - :param lease_duration: Possible values include: "infinite", "fixed". - :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :param copy_id: - :type copy_id: str - :param copy_status: Possible values include: "pending", "success", "aborted", "failed". - :type copy_status: str or ~azure.storage.blob.models.CopyStatusType - :param copy_source: - :type copy_source: str - :param copy_progress: - :type copy_progress: str - :param copy_completion_time: - :type copy_completion_time: ~datetime.datetime - :param copy_status_description: - :type copy_status_description: str - :param server_encrypted: - :type server_encrypted: bool - :param incremental_copy: - :type incremental_copy: bool - :param destination_snapshot: - :type destination_snapshot: str - :param deleted_time: - :type deleted_time: ~datetime.datetime - :param remaining_retention_days: - :type remaining_retention_days: int - :param access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", - "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". - :type access_tier: str or ~azure.storage.blob.models.AccessTier - :param access_tier_inferred: - :type access_tier_inferred: bool - :param archive_status: Possible values include: "rehydrate-pending-to-hot", "rehydrate- - pending-to-cool". - :type archive_status: str or ~azure.storage.blob.models.ArchiveStatus - :param customer_provided_key_sha256: - :type customer_provided_key_sha256: str - :param encryption_scope: The name of the encryption scope under which the blob is encrypted. - :type encryption_scope: str - :param access_tier_change_time: - :type access_tier_change_time: ~datetime.datetime - :param tag_count: - :type tag_count: int - :param expires_on: - :type expires_on: ~datetime.datetime - :param is_sealed: - :type is_sealed: bool - :param rehydrate_priority: If an object is in rehydrate pending state then this header is - returned with priority of rehydrate. Valid values are High and Standard. Possible values - include: "High", "Standard". 
- :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :param last_accessed_on: - :type last_accessed_on: ~datetime.datetime - """ - - _validation = { - 'last_modified': {'required': True}, - 'etag': {'required': True}, - } - - _attribute_map = { - 'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123'}, - 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, - 'etag': {'key': 'Etag', 'type': 'str'}, - 'content_length': {'key': 'Content-Length', 'type': 'long'}, - 'content_type': {'key': 'Content-Type', 'type': 'str'}, - 'content_encoding': {'key': 'Content-Encoding', 'type': 'str'}, - 'content_language': {'key': 'Content-Language', 'type': 'str'}, - 'content_md5': {'key': 'Content-MD5', 'type': 'bytearray'}, - 'content_disposition': {'key': 'Content-Disposition', 'type': 'str'}, - 'cache_control': {'key': 'Cache-Control', 'type': 'str'}, - 'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long'}, - 'blob_type': {'key': 'BlobType', 'type': 'str'}, - 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, - 'lease_state': {'key': 'LeaseState', 'type': 'str'}, - 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, - 'copy_id': {'key': 'CopyId', 'type': 'str'}, - 'copy_status': {'key': 'CopyStatus', 'type': 'str'}, - 'copy_source': {'key': 'CopySource', 'type': 'str'}, - 'copy_progress': {'key': 'CopyProgress', 'type': 'str'}, - 'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123'}, - 'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str'}, - 'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool'}, - 'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool'}, - 'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str'}, - 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, - 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, - 'access_tier': {'key': 'AccessTier', 'type': 'str'}, - 'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool'}, - 'archive_status': {'key': 'ArchiveStatus', 'type': 'str'}, - 'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str'}, - 'encryption_scope': {'key': 'EncryptionScope', 'type': 'str'}, - 'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123'}, - 'tag_count': {'key': 'TagCount', 'type': 'int'}, - 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123'}, - 'is_sealed': {'key': 'Sealed', 'type': 'bool'}, - 'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str'}, - 'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123'}, - } - _xml_map = { - 'name': 'Properties' - } - - def __init__( - self, - **kwargs - ): - super(BlobPropertiesInternal, self).__init__(**kwargs) - self.creation_time = kwargs.get('creation_time', None) - self.last_modified = kwargs['last_modified'] - self.etag = kwargs['etag'] - self.content_length = kwargs.get('content_length', None) - self.content_type = kwargs.get('content_type', None) - self.content_encoding = kwargs.get('content_encoding', None) - self.content_language = kwargs.get('content_language', None) - self.content_md5 = kwargs.get('content_md5', None) - self.content_disposition = kwargs.get('content_disposition', None) - self.cache_control = kwargs.get('cache_control', None) - self.blob_sequence_number = kwargs.get('blob_sequence_number', None) - self.blob_type = kwargs.get('blob_type', None) - self.lease_status = kwargs.get('lease_status', None) - self.lease_state = 
kwargs.get('lease_state', None) - self.lease_duration = kwargs.get('lease_duration', None) - self.copy_id = kwargs.get('copy_id', None) - self.copy_status = kwargs.get('copy_status', None) - self.copy_source = kwargs.get('copy_source', None) - self.copy_progress = kwargs.get('copy_progress', None) - self.copy_completion_time = kwargs.get('copy_completion_time', None) - self.copy_status_description = kwargs.get('copy_status_description', None) - self.server_encrypted = kwargs.get('server_encrypted', None) - self.incremental_copy = kwargs.get('incremental_copy', None) - self.destination_snapshot = kwargs.get('destination_snapshot', None) - self.deleted_time = kwargs.get('deleted_time', None) - self.remaining_retention_days = kwargs.get('remaining_retention_days', None) - self.access_tier = kwargs.get('access_tier', None) - self.access_tier_inferred = kwargs.get('access_tier_inferred', None) - self.archive_status = kwargs.get('archive_status', None) - self.customer_provided_key_sha256 = kwargs.get('customer_provided_key_sha256', None) - self.encryption_scope = kwargs.get('encryption_scope', None) - self.access_tier_change_time = kwargs.get('access_tier_change_time', None) - self.tag_count = kwargs.get('tag_count', None) - self.expires_on = kwargs.get('expires_on', None) - self.is_sealed = kwargs.get('is_sealed', None) - self.rehydrate_priority = kwargs.get('rehydrate_priority', None) - self.last_accessed_on = kwargs.get('last_accessed_on', None) - - -class BlobTag(msrest.serialization.Model): - """BlobTag. - - All required parameters must be populated in order to send to Azure. - - :param key: Required. - :type key: str - :param value: Required. - :type value: str - """ - - _validation = { - 'key': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'key': {'key': 'Key', 'type': 'str'}, - 'value': {'key': 'Value', 'type': 'str'}, - } - _xml_map = { - 'name': 'Tag' - } - - def __init__( - self, - **kwargs - ): - super(BlobTag, self).__init__(**kwargs) - self.key = kwargs['key'] - self.value = kwargs['value'] - - -class BlobTags(msrest.serialization.Model): - """Blob tags. - - All required parameters must be populated in order to send to Azure. - - :param blob_tag_set: Required. - :type blob_tag_set: list[~azure.storage.blob.models.BlobTag] - """ - - _validation = { - 'blob_tag_set': {'required': True}, - } - - _attribute_map = { - 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, - } - _xml_map = { - 'name': 'Tags' - } - - def __init__( - self, - **kwargs - ): - super(BlobTags, self).__init__(**kwargs) - self.blob_tag_set = kwargs['blob_tag_set'] - - -class Block(msrest.serialization.Model): - """Represents a single block in a block blob. It describes the block's ID and size. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The base64 encoded block ID. - :type name: str - :param size: Required. The block size in bytes. - :type size: int - """ - - _validation = { - 'name': {'required': True}, - 'size': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'size': {'key': 'Size', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(Block, self).__init__(**kwargs) - self.name = kwargs['name'] - self.size = kwargs['size'] - - -class BlockList(msrest.serialization.Model): - """BlockList. 
- - :param committed_blocks: - :type committed_blocks: list[~azure.storage.blob.models.Block] - :param uncommitted_blocks: - :type uncommitted_blocks: list[~azure.storage.blob.models.Block] - """ - - _attribute_map = { - 'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, - 'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, - } - - def __init__( - self, - **kwargs - ): - super(BlockList, self).__init__(**kwargs) - self.committed_blocks = kwargs.get('committed_blocks', None) - self.uncommitted_blocks = kwargs.get('uncommitted_blocks', None) - - -class BlockLookupList(msrest.serialization.Model): - """BlockLookupList. - - :param committed: - :type committed: list[str] - :param uncommitted: - :type uncommitted: list[str] - :param latest: - :type latest: list[str] - """ - - _attribute_map = { - 'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'itemsName': 'Committed'}}, - 'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'itemsName': 'Uncommitted'}}, - 'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'itemsName': 'Latest'}}, - } - _xml_map = { - 'name': 'BlockList' - } - - def __init__( - self, - **kwargs - ): - super(BlockLookupList, self).__init__(**kwargs) - self.committed = kwargs.get('committed', None) - self.uncommitted = kwargs.get('uncommitted', None) - self.latest = kwargs.get('latest', None) - - -class ClearRange(msrest.serialization.Model): - """ClearRange. - - All required parameters must be populated in order to send to Azure. - - :param start: Required. - :type start: long - :param end: Required. - :type end: long - """ - - _validation = { - 'start': {'required': True}, - 'end': {'required': True}, - } - - _attribute_map = { - 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, - 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, - } - _xml_map = { - 'name': 'ClearRange' - } - - def __init__( - self, - **kwargs - ): - super(ClearRange, self).__init__(**kwargs) - self.start = kwargs['start'] - self.end = kwargs['end'] - - -class ContainerCpkScopeInfo(msrest.serialization.Model): - """Parameter group. - - :param default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the - default encryption scope to set on the container and use for all future writes. - :type default_encryption_scope: str - :param prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, - prevents any request from specifying a different encryption scope than the scope set on the - container. - :type prevent_encryption_scope_override: bool - """ - - _attribute_map = { - 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, - 'prevent_encryption_scope_override': {'key': 'PreventEncryptionScopeOverride', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(ContainerCpkScopeInfo, self).__init__(**kwargs) - self.default_encryption_scope = kwargs.get('default_encryption_scope', None) - self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None) - - -class ContainerItem(msrest.serialization.Model): - """An Azure Storage container. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param deleted: - :type deleted: bool - :param version: - :type version: str - :param properties: Required. Properties of a container. 
- :type properties: ~azure.storage.blob.models.ContainerProperties - :param metadata: Dictionary of :code:``. - :type metadata: dict[str, str] - """ - - _validation = { - 'name': {'required': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'deleted': {'key': 'Deleted', 'type': 'bool'}, - 'version': {'key': 'Version', 'type': 'str'}, - 'properties': {'key': 'Properties', 'type': 'ContainerProperties'}, - 'metadata': {'key': 'Metadata', 'type': '{str}'}, - } - _xml_map = { - 'name': 'Container' - } - - def __init__( - self, - **kwargs - ): - super(ContainerItem, self).__init__(**kwargs) - self.name = kwargs['name'] - self.deleted = kwargs.get('deleted', None) - self.version = kwargs.get('version', None) - self.properties = kwargs['properties'] - self.metadata = kwargs.get('metadata', None) - - -class ContainerProperties(msrest.serialization.Model): - """Properties of a container. - - All required parameters must be populated in order to send to Azure. - - :param last_modified: Required. - :type last_modified: ~datetime.datetime - :param etag: Required. - :type etag: str - :param lease_status: Possible values include: "locked", "unlocked". - :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :param lease_state: Possible values include: "available", "leased", "expired", "breaking", - "broken". - :type lease_state: str or ~azure.storage.blob.models.LeaseStateType - :param lease_duration: Possible values include: "infinite", "fixed". - :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :param public_access: Possible values include: "container", "blob". - :type public_access: str or ~azure.storage.blob.models.PublicAccessType - :param has_immutability_policy: - :type has_immutability_policy: bool - :param has_legal_hold: - :type has_legal_hold: bool - :param default_encryption_scope: - :type default_encryption_scope: str - :param prevent_encryption_scope_override: - :type prevent_encryption_scope_override: bool - :param deleted_time: - :type deleted_time: ~datetime.datetime - :param remaining_retention_days: - :type remaining_retention_days: int - """ - - _validation = { - 'last_modified': {'required': True}, - 'etag': {'required': True}, - } - - _attribute_map = { - 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, - 'etag': {'key': 'Etag', 'type': 'str'}, - 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, - 'lease_state': {'key': 'LeaseState', 'type': 'str'}, - 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, - 'public_access': {'key': 'PublicAccess', 'type': 'str'}, - 'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool'}, - 'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool'}, - 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, - 'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool'}, - 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, - 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(ContainerProperties, self).__init__(**kwargs) - self.last_modified = kwargs['last_modified'] - self.etag = kwargs['etag'] - self.lease_status = kwargs.get('lease_status', None) - self.lease_state = kwargs.get('lease_state', None) - self.lease_duration = kwargs.get('lease_duration', None) - self.public_access = kwargs.get('public_access', None) - self.has_immutability_policy = 
kwargs.get('has_immutability_policy', None) - self.has_legal_hold = kwargs.get('has_legal_hold', None) - self.default_encryption_scope = kwargs.get('default_encryption_scope', None) - self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None) - self.deleted_time = kwargs.get('deleted_time', None) - self.remaining_retention_days = kwargs.get('remaining_retention_days', None) - - -class CorsRule(msrest.serialization.Model): - """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain. - - All required parameters must be populated in order to send to Azure. - - :param allowed_origins: Required. The origin domains that are permitted to make a request - against the storage service via CORS. The origin domain is the domain from which the request - originates. Note that the origin must be an exact case-sensitive match with the origin that the - user age sends to the service. You can also use the wildcard character '*' to allow all origin - domains to make requests via CORS. - :type allowed_origins: str - :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may - use for a CORS request. (comma separated). - :type allowed_methods: str - :param allowed_headers: Required. the request headers that the origin domain may specify on the - CORS request. - :type allowed_headers: str - :param exposed_headers: Required. The response headers that may be sent in the response to the - CORS request and exposed by the browser to the request issuer. - :type exposed_headers: str - :param max_age_in_seconds: Required. The maximum amount time that a browser should cache the - preflight OPTIONS request. - :type max_age_in_seconds: int - """ - - _validation = { - 'allowed_origins': {'required': True}, - 'allowed_methods': {'required': True}, - 'allowed_headers': {'required': True}, - 'exposed_headers': {'required': True}, - 'max_age_in_seconds': {'required': True, 'minimum': 0}, - } - - _attribute_map = { - 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'}, - 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'}, - 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'}, - 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'}, - 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(CorsRule, self).__init__(**kwargs) - self.allowed_origins = kwargs['allowed_origins'] - self.allowed_methods = kwargs['allowed_methods'] - self.allowed_headers = kwargs['allowed_headers'] - self.exposed_headers = kwargs['exposed_headers'] - self.max_age_in_seconds = kwargs['max_age_in_seconds'] - - -class CpkInfo(msrest.serialization.Model): - """Parameter group. - - :param encryption_key: Optional. Specifies the encryption key to use to encrypt the data - provided in the request. If not specified, encryption is performed with the root account - encryption key. For more information, see Encryption at Rest for Azure Storage Services. - :type encryption_key: str - :param encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided - if the x-ms-encryption-key header is provided. 
- :type encryption_key_sha256: str - """ - - _attribute_map = { - 'encryption_key': {'key': 'encryptionKey', 'type': 'str'}, - 'encryption_key_sha256': {'key': 'encryptionKeySha256', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CpkInfo, self).__init__(**kwargs) - self.encryption_key = kwargs.get('encryption_key', None) - self.encryption_key_sha256 = kwargs.get('encryption_key_sha256', None) - - -class CpkScopeInfo(msrest.serialization.Model): - """Parameter group. - - :param encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the - encryption scope to use to encrypt the data provided in the request. If not specified, - encryption is performed with the default account encryption scope. For more information, see - Encryption at Rest for Azure Storage Services. - :type encryption_scope: str - """ - - _attribute_map = { - 'encryption_scope': {'key': 'encryptionScope', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CpkScopeInfo, self).__init__(**kwargs) - self.encryption_scope = kwargs.get('encryption_scope', None) - - -class DataLakeStorageError(msrest.serialization.Model): - """DataLakeStorageError. - - :param data_lake_storage_error_details: The service error response object. - :type data_lake_storage_error_details: ~azure.storage.blob.models.DataLakeStorageErrorDetails - """ - - _attribute_map = { - 'data_lake_storage_error_details': {'key': 'error', 'type': 'DataLakeStorageErrorDetails'}, - } - - def __init__( - self, - **kwargs - ): - super(DataLakeStorageError, self).__init__(**kwargs) - self.data_lake_storage_error_details = kwargs.get('data_lake_storage_error_details', None) - - -class DataLakeStorageErrorDetails(msrest.serialization.Model): - """The service error response object. - - :param code: The service error code. - :type code: str - :param message: The service error message. - :type message: str - """ - - _attribute_map = { - 'code': {'key': 'Code', 'type': 'str'}, - 'message': {'key': 'Message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataLakeStorageErrorDetails, self).__init__(**kwargs) - self.code = kwargs.get('code', None) - self.message = kwargs.get('message', None) - - -class DelimitedTextConfiguration(msrest.serialization.Model): - """delimited text configuration. - - All required parameters must be populated in order to send to Azure. - - :param column_separator: Required. column separator. - :type column_separator: str - :param field_quote: Required. field quote. - :type field_quote: str - :param record_separator: Required. record separator. - :type record_separator: str - :param escape_char: Required. escape char. - :type escape_char: str - :param headers_present: Required. has headers. 
- :type headers_present: bool - """ - - _validation = { - 'column_separator': {'required': True}, - 'field_quote': {'required': True}, - 'record_separator': {'required': True}, - 'escape_char': {'required': True}, - 'headers_present': {'required': True}, - } - - _attribute_map = { - 'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}}, - 'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}}, - 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, - 'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}}, - 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}}, - } - _xml_map = { - 'name': 'DelimitedTextConfiguration' - } - - def __init__( - self, - **kwargs - ): - super(DelimitedTextConfiguration, self).__init__(**kwargs) - self.column_separator = kwargs['column_separator'] - self.field_quote = kwargs['field_quote'] - self.record_separator = kwargs['record_separator'] - self.escape_char = kwargs['escape_char'] - self.headers_present = kwargs['headers_present'] - - -class DirectoryHttpHeaders(msrest.serialization.Model): - """Parameter group. - - :param cache_control: Cache control for given resource. - :type cache_control: str - :param content_type: Content type for given resource. - :type content_type: str - :param content_encoding: Content encoding for given resource. - :type content_encoding: str - :param content_language: Content language for given resource. - :type content_language: str - :param content_disposition: Content disposition for given resource. - :type content_disposition: str - """ - - _attribute_map = { - 'cache_control': {'key': 'cacheControl', 'type': 'str'}, - 'content_type': {'key': 'contentType', 'type': 'str'}, - 'content_encoding': {'key': 'contentEncoding', 'type': 'str'}, - 'content_language': {'key': 'contentLanguage', 'type': 'str'}, - 'content_disposition': {'key': 'contentDisposition', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DirectoryHttpHeaders, self).__init__(**kwargs) - self.cache_control = kwargs.get('cache_control', None) - self.content_type = kwargs.get('content_type', None) - self.content_encoding = kwargs.get('content_encoding', None) - self.content_language = kwargs.get('content_language', None) - self.content_disposition = kwargs.get('content_disposition', None) - - -class FilterBlobItem(msrest.serialization.Model): - """Blob info from a Filter Blobs API call. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param container_name: Required. - :type container_name: str - :param tags: A set of tags. Blob tags. - :type tags: ~azure.storage.blob.models.BlobTags - """ - - _validation = { - 'name': {'required': True}, - 'container_name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'container_name': {'key': 'ContainerName', 'type': 'str'}, - 'tags': {'key': 'Tags', 'type': 'BlobTags'}, - } - _xml_map = { - 'name': 'Blob' - } - - def __init__( - self, - **kwargs - ): - super(FilterBlobItem, self).__init__(**kwargs) - self.name = kwargs['name'] - self.container_name = kwargs['container_name'] - self.tags = kwargs.get('tags', None) - - -class FilterBlobSegment(msrest.serialization.Model): - """The result of a Filter Blobs API call. - - All required parameters must be populated in order to send to Azure. 
- - :param service_endpoint: Required. - :type service_endpoint: str - :param where: Required. - :type where: str - :param blobs: Required. - :type blobs: list[~azure.storage.blob.models.FilterBlobItem] - :param next_marker: - :type next_marker: str - """ - - _validation = { - 'service_endpoint': {'required': True}, - 'where': {'required': True}, - 'blobs': {'required': True}, - } - - _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'where': {'key': 'Where', 'type': 'str'}, - 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'wrapped': True, 'itemsName': 'Blob'}}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' - } - - def __init__( - self, - **kwargs - ): - super(FilterBlobSegment, self).__init__(**kwargs) - self.service_endpoint = kwargs['service_endpoint'] - self.where = kwargs['where'] - self.blobs = kwargs['blobs'] - self.next_marker = kwargs.get('next_marker', None) - - -class GeoReplication(msrest.serialization.Model): - """Geo-Replication information for the Secondary Storage Service. - - All required parameters must be populated in order to send to Azure. - - :param status: Required. The status of the secondary location. Possible values include: "live", - "bootstrap", "unavailable". - :type status: str or ~azure.storage.blob.models.GeoReplicationStatusType - :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes - preceding this value are guaranteed to be available for read operations at the secondary. - Primary writes after this point in time may or may not be available for reads. - :type last_sync_time: ~datetime.datetime - """ - - _validation = { - 'status': {'required': True}, - 'last_sync_time': {'required': True}, - } - - _attribute_map = { - 'status': {'key': 'Status', 'type': 'str'}, - 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'}, - } - - def __init__( - self, - **kwargs - ): - super(GeoReplication, self).__init__(**kwargs) - self.status = kwargs['status'] - self.last_sync_time = kwargs['last_sync_time'] - - -class JsonTextConfiguration(msrest.serialization.Model): - """json text configuration. - - All required parameters must be populated in order to send to Azure. - - :param record_separator: Required. record separator. - :type record_separator: str - """ - - _validation = { - 'record_separator': {'required': True}, - } - - _attribute_map = { - 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, - } - _xml_map = { - 'name': 'JsonTextConfiguration' - } - - def __init__( - self, - **kwargs - ): - super(JsonTextConfiguration, self).__init__(**kwargs) - self.record_separator = kwargs['record_separator'] - - -class KeyInfo(msrest.serialization.Model): - """Key information. - - All required parameters must be populated in order to send to Azure. - - :param start: Required. The date-time the key is active in ISO 8601 UTC time. - :type start: str - :param expiry: Required. The date-time the key expires in ISO 8601 UTC time. 
- :type expiry: str - """ - - _validation = { - 'start': {'required': True}, - 'expiry': {'required': True}, - } - - _attribute_map = { - 'start': {'key': 'Start', 'type': 'str'}, - 'expiry': {'key': 'Expiry', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(KeyInfo, self).__init__(**kwargs) - self.start = kwargs['start'] - self.expiry = kwargs['expiry'] - - -class LeaseAccessConditions(msrest.serialization.Model): - """Parameter group. - - :param lease_id: If specified, the operation only succeeds if the resource's lease is active - and matches this ID. - :type lease_id: str - """ - - _attribute_map = { - 'lease_id': {'key': 'leaseId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(LeaseAccessConditions, self).__init__(**kwargs) - self.lease_id = kwargs.get('lease_id', None) - - -class ListBlobsFlatSegmentResponse(msrest.serialization.Model): - """An enumeration of blobs. - - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param container_name: Required. - :type container_name: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param segment: Required. - :type segment: ~azure.storage.blob.models.BlobFlatListSegment - :param next_marker: - :type next_marker: str - """ - - _validation = { - 'service_endpoint': {'required': True}, - 'container_name': {'required': True}, - 'segment': {'required': True}, - } - - _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment'}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' - } - - def __init__( - self, - **kwargs - ): - super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs) - self.service_endpoint = kwargs['service_endpoint'] - self.container_name = kwargs['container_name'] - self.prefix = kwargs.get('prefix', None) - self.marker = kwargs.get('marker', None) - self.max_results = kwargs.get('max_results', None) - self.segment = kwargs['segment'] - self.next_marker = kwargs.get('next_marker', None) - - -class ListBlobsHierarchySegmentResponse(msrest.serialization.Model): - """An enumeration of blobs. - - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param container_name: Required. - :type container_name: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param delimiter: - :type delimiter: str - :param segment: Required. 
- :type segment: ~azure.storage.blob.models.BlobHierarchyListSegment - :param next_marker: - :type next_marker: str - """ - - _validation = { - 'service_endpoint': {'required': True}, - 'container_name': {'required': True}, - 'segment': {'required': True}, - } - - _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'delimiter': {'key': 'Delimiter', 'type': 'str'}, - 'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment'}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' - } - - def __init__( - self, - **kwargs - ): - super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs) - self.service_endpoint = kwargs['service_endpoint'] - self.container_name = kwargs['container_name'] - self.prefix = kwargs.get('prefix', None) - self.marker = kwargs.get('marker', None) - self.max_results = kwargs.get('max_results', None) - self.delimiter = kwargs.get('delimiter', None) - self.segment = kwargs['segment'] - self.next_marker = kwargs.get('next_marker', None) - - -class ListContainersSegmentResponse(msrest.serialization.Model): - """An enumeration of containers. - - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param container_items: Required. - :type container_items: list[~azure.storage.blob.models.ContainerItem] - :param next_marker: - :type next_marker: str - """ - - _validation = { - 'service_endpoint': {'required': True}, - 'container_items': {'required': True}, - } - - _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'wrapped': True, 'itemsName': 'Container'}}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' - } - - def __init__( - self, - **kwargs - ): - super(ListContainersSegmentResponse, self).__init__(**kwargs) - self.service_endpoint = kwargs['service_endpoint'] - self.prefix = kwargs.get('prefix', None) - self.marker = kwargs.get('marker', None) - self.max_results = kwargs.get('max_results', None) - self.container_items = kwargs['container_items'] - self.next_marker = kwargs.get('next_marker', None) - - -class Logging(msrest.serialization.Model): - """Azure Analytics Logging settings. - - All required parameters must be populated in order to send to Azure. - - :param version: Required. The version of Storage Analytics to configure. - :type version: str - :param delete: Required. Indicates whether all delete requests should be logged. - :type delete: bool - :param read: Required. Indicates whether all read requests should be logged. - :type read: bool - :param write: Required. Indicates whether all write requests should be logged. - :type write: bool - :param retention_policy: Required. 
the retention policy which determines how long the - associated data should persist. - :type retention_policy: ~azure.storage.blob.models.RetentionPolicy - """ - - _validation = { - 'version': {'required': True}, - 'delete': {'required': True}, - 'read': {'required': True}, - 'write': {'required': True}, - 'retention_policy': {'required': True}, - } - - _attribute_map = { - 'version': {'key': 'Version', 'type': 'str'}, - 'delete': {'key': 'Delete', 'type': 'bool'}, - 'read': {'key': 'Read', 'type': 'bool'}, - 'write': {'key': 'Write', 'type': 'bool'}, - 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, - } - - def __init__( - self, - **kwargs - ): - super(Logging, self).__init__(**kwargs) - self.version = kwargs['version'] - self.delete = kwargs['delete'] - self.read = kwargs['read'] - self.write = kwargs['write'] - self.retention_policy = kwargs['retention_policy'] - - -class Metrics(msrest.serialization.Model): - """a summary of request statistics grouped by API in hour or minute aggregates for blobs. - - All required parameters must be populated in order to send to Azure. - - :param version: The version of Storage Analytics to configure. - :type version: str - :param enabled: Required. Indicates whether metrics are enabled for the Blob service. - :type enabled: bool - :param include_apis: Indicates whether metrics should generate summary statistics for called - API operations. - :type include_apis: bool - :param retention_policy: the retention policy which determines how long the associated data - should persist. - :type retention_policy: ~azure.storage.blob.models.RetentionPolicy - """ - - _validation = { - 'enabled': {'required': True}, - } - - _attribute_map = { - 'version': {'key': 'Version', 'type': 'str'}, - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'}, - 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, - } - - def __init__( - self, - **kwargs - ): - super(Metrics, self).__init__(**kwargs) - self.version = kwargs.get('version', None) - self.enabled = kwargs['enabled'] - self.include_apis = kwargs.get('include_apis', None) - self.retention_policy = kwargs.get('retention_policy', None) - - -class ModifiedAccessConditions(msrest.serialization.Model): - """Parameter group. - - :param if_modified_since: Specify this header value to operate only on a blob if it has been - modified since the specified date/time. - :type if_modified_since: ~datetime.datetime - :param if_unmodified_since: Specify this header value to operate only on a blob if it has not - been modified since the specified date/time. - :type if_unmodified_since: ~datetime.datetime - :param if_match: Specify an ETag value to operate only on blobs with a matching value. - :type if_match: str - :param if_none_match: Specify an ETag value to operate only on blobs without a matching value. - :type if_none_match: str - :param if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a - matching value. 
- :type if_tags: str - """ - - _attribute_map = { - 'if_modified_since': {'key': 'ifModifiedSince', 'type': 'rfc-1123'}, - 'if_unmodified_since': {'key': 'ifUnmodifiedSince', 'type': 'rfc-1123'}, - 'if_match': {'key': 'ifMatch', 'type': 'str'}, - 'if_none_match': {'key': 'ifNoneMatch', 'type': 'str'}, - 'if_tags': {'key': 'ifTags', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ModifiedAccessConditions, self).__init__(**kwargs) - self.if_modified_since = kwargs.get('if_modified_since', None) - self.if_unmodified_since = kwargs.get('if_unmodified_since', None) - self.if_match = kwargs.get('if_match', None) - self.if_none_match = kwargs.get('if_none_match', None) - self.if_tags = kwargs.get('if_tags', None) - - -class PageList(msrest.serialization.Model): - """the list of pages. - - :param page_range: - :type page_range: list[~azure.storage.blob.models.PageRange] - :param clear_range: - :type clear_range: list[~azure.storage.blob.models.ClearRange] - """ - - _attribute_map = { - 'page_range': {'key': 'PageRange', 'type': '[PageRange]'}, - 'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]'}, - } - - def __init__( - self, - **kwargs - ): - super(PageList, self).__init__(**kwargs) - self.page_range = kwargs.get('page_range', None) - self.clear_range = kwargs.get('clear_range', None) - - -class PageRange(msrest.serialization.Model): - """PageRange. - - All required parameters must be populated in order to send to Azure. - - :param start: Required. - :type start: long - :param end: Required. - :type end: long - """ - - _validation = { - 'start': {'required': True}, - 'end': {'required': True}, - } - - _attribute_map = { - 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, - 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, - } - _xml_map = { - 'name': 'PageRange' - } - - def __init__( - self, - **kwargs - ): - super(PageRange, self).__init__(**kwargs) - self.start = kwargs['start'] - self.end = kwargs['end'] - - -class QueryFormat(msrest.serialization.Model): - """QueryFormat. - - :param type: The quick query format type. Possible values include: "delimited", "json", - "arrow". - :type type: str or ~azure.storage.blob.models.QueryFormatType - :param delimited_text_configuration: delimited text configuration. - :type delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration - :param json_text_configuration: json text configuration. - :type json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration - :param arrow_configuration: arrow configuration. - :type arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration - """ - - _attribute_map = { - 'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}}, - 'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration'}, - 'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration'}, - 'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - super(QueryFormat, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.delimited_text_configuration = kwargs.get('delimited_text_configuration', None) - self.json_text_configuration = kwargs.get('json_text_configuration', None) - self.arrow_configuration = kwargs.get('arrow_configuration', None) - - -class QueryRequest(msrest.serialization.Model): - """the quick query body. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar query_type: Required. the query type. Default value: "SQL". - :vartype query_type: str - :param expression: Required. a query statement. - :type expression: str - :param input_serialization: - :type input_serialization: ~azure.storage.blob.models.QuerySerialization - :param output_serialization: - :type output_serialization: ~azure.storage.blob.models.QuerySerialization - """ - - _validation = { - 'query_type': {'required': True, 'constant': True}, - 'expression': {'required': True}, - } - - _attribute_map = { - 'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}}, - 'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}}, - 'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization'}, - 'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization'}, - } - _xml_map = { - 'name': 'QueryRequest' - } - - query_type = "SQL" - - def __init__( - self, - **kwargs - ): - super(QueryRequest, self).__init__(**kwargs) - self.expression = kwargs['expression'] - self.input_serialization = kwargs.get('input_serialization', None) - self.output_serialization = kwargs.get('output_serialization', None) - - -class QuerySerialization(msrest.serialization.Model): - """QuerySerialization. - - All required parameters must be populated in order to send to Azure. - - :param format: Required. - :type format: ~azure.storage.blob.models.QueryFormat - """ - - _validation = { - 'format': {'required': True}, - } - - _attribute_map = { - 'format': {'key': 'Format', 'type': 'QueryFormat'}, - } - - def __init__( - self, - **kwargs - ): - super(QuerySerialization, self).__init__(**kwargs) - self.format = kwargs['format'] - - -class RetentionPolicy(msrest.serialization.Model): - """the retention policy which determines how long the associated data should persist. - - All required parameters must be populated in order to send to Azure. - - :param enabled: Required. Indicates whether a retention policy is enabled for the storage - service. - :type enabled: bool - :param days: Indicates the number of days that metrics or logging or soft-deleted data should - be retained. All data older than this value will be deleted. - :type days: int - :param allow_permanent_delete: Indicates whether permanent delete is allowed on this storage - account. - :type allow_permanent_delete: bool - """ - - _validation = { - 'enabled': {'required': True}, - 'days': {'minimum': 1}, - } - - _attribute_map = { - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'days': {'key': 'Days', 'type': 'int'}, - 'allow_permanent_delete': {'key': 'AllowPermanentDelete', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(RetentionPolicy, self).__init__(**kwargs) - self.enabled = kwargs['enabled'] - self.days = kwargs.get('days', None) - self.allow_permanent_delete = kwargs.get('allow_permanent_delete', None) - - -class SequenceNumberAccessConditions(msrest.serialization.Model): - """Parameter group. - - :param if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a - blob if it has a sequence number less than or equal to the specified. - :type if_sequence_number_less_than_or_equal_to: long - :param if_sequence_number_less_than: Specify this header value to operate only on a blob if it - has a sequence number less than the specified. 
- :type if_sequence_number_less_than: long - :param if_sequence_number_equal_to: Specify this header value to operate only on a blob if it - has the specified sequence number. - :type if_sequence_number_equal_to: long - """ - - _attribute_map = { - 'if_sequence_number_less_than_or_equal_to': {'key': 'ifSequenceNumberLessThanOrEqualTo', 'type': 'long'}, - 'if_sequence_number_less_than': {'key': 'ifSequenceNumberLessThan', 'type': 'long'}, - 'if_sequence_number_equal_to': {'key': 'ifSequenceNumberEqualTo', 'type': 'long'}, - } - - def __init__( - self, - **kwargs - ): - super(SequenceNumberAccessConditions, self).__init__(**kwargs) - self.if_sequence_number_less_than_or_equal_to = kwargs.get('if_sequence_number_less_than_or_equal_to', None) - self.if_sequence_number_less_than = kwargs.get('if_sequence_number_less_than', None) - self.if_sequence_number_equal_to = kwargs.get('if_sequence_number_equal_to', None) - - -class SignedIdentifier(msrest.serialization.Model): - """signed identifier. - - All required parameters must be populated in order to send to Azure. - - :param id: Required. a unique id. - :type id: str - :param access_policy: An Access policy. - :type access_policy: ~azure.storage.blob.models.AccessPolicy - """ - - _validation = { - 'id': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'Id', 'type': 'str'}, - 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, - } - _xml_map = { - 'name': 'SignedIdentifier' - } - - def __init__( - self, - **kwargs - ): - super(SignedIdentifier, self).__init__(**kwargs) - self.id = kwargs['id'] - self.access_policy = kwargs.get('access_policy', None) - - -class SourceModifiedAccessConditions(msrest.serialization.Model): - """Parameter group. - - :param source_if_modified_since: Specify this header value to operate only on a blob if it has - been modified since the specified date/time. - :type source_if_modified_since: ~datetime.datetime - :param source_if_unmodified_since: Specify this header value to operate only on a blob if it - has not been modified since the specified date/time. - :type source_if_unmodified_since: ~datetime.datetime - :param source_if_match: Specify an ETag value to operate only on blobs with a matching value. - :type source_if_match: str - :param source_if_none_match: Specify an ETag value to operate only on blobs without a matching - value. - :type source_if_none_match: str - :param source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a - matching value. - :type source_if_tags: str - """ - - _attribute_map = { - 'source_if_modified_since': {'key': 'sourceIfModifiedSince', 'type': 'rfc-1123'}, - 'source_if_unmodified_since': {'key': 'sourceIfUnmodifiedSince', 'type': 'rfc-1123'}, - 'source_if_match': {'key': 'sourceIfMatch', 'type': 'str'}, - 'source_if_none_match': {'key': 'sourceIfNoneMatch', 'type': 'str'}, - 'source_if_tags': {'key': 'sourceIfTags', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SourceModifiedAccessConditions, self).__init__(**kwargs) - self.source_if_modified_since = kwargs.get('source_if_modified_since', None) - self.source_if_unmodified_since = kwargs.get('source_if_unmodified_since', None) - self.source_if_match = kwargs.get('source_if_match', None) - self.source_if_none_match = kwargs.get('source_if_none_match', None) - self.source_if_tags = kwargs.get('source_if_tags', None) - - -class StaticWebsite(msrest.serialization.Model): - """The properties that enable an account to host a static website. 
- - All required parameters must be populated in order to send to Azure. - - :param enabled: Required. Indicates whether this account is hosting a static website. - :type enabled: bool - :param index_document: The default name of the index page under each directory. - :type index_document: str - :param error_document404_path: The absolute path of the custom 404 page. - :type error_document404_path: str - :param default_index_document_path: Absolute path of the default index page. - :type default_index_document_path: str - """ - - _validation = { - 'enabled': {'required': True}, - } - - _attribute_map = { - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'index_document': {'key': 'IndexDocument', 'type': 'str'}, - 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str'}, - 'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(StaticWebsite, self).__init__(**kwargs) - self.enabled = kwargs['enabled'] - self.index_document = kwargs.get('index_document', None) - self.error_document404_path = kwargs.get('error_document404_path', None) - self.default_index_document_path = kwargs.get('default_index_document_path', None) - - -class StorageError(msrest.serialization.Model): - """StorageError. - - :param message: - :type message: str - """ - - _attribute_map = { - 'message': {'key': 'Message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(StorageError, self).__init__(**kwargs) - self.message = kwargs.get('message', None) - - -class StorageServiceProperties(msrest.serialization.Model): - """Storage Service Properties. - - :param logging: Azure Analytics Logging settings. - :type logging: ~azure.storage.blob.models.Logging - :param hour_metrics: a summary of request statistics grouped by API in hour or minute - aggregates for blobs. - :type hour_metrics: ~azure.storage.blob.models.Metrics - :param minute_metrics: a summary of request statistics grouped by API in hour or minute - aggregates for blobs. - :type minute_metrics: ~azure.storage.blob.models.Metrics - :param cors: The set of CORS rules. - :type cors: list[~azure.storage.blob.models.CorsRule] - :param default_service_version: The default version to use for requests to the Blob service if - an incoming request's version is not specified. Possible values include version 2008-10-27 and - all more recent versions. - :type default_service_version: str - :param delete_retention_policy: the retention policy which determines how long the associated - data should persist. - :type delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy - :param static_website: The properties that enable an account to host a static website. 
- :type static_website: ~azure.storage.blob.models.StaticWebsite - """ - - _attribute_map = { - 'logging': {'key': 'Logging', 'type': 'Logging'}, - 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, - 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, - 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}}, - 'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str'}, - 'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy'}, - 'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite'}, - } - - def __init__( - self, - **kwargs - ): - super(StorageServiceProperties, self).__init__(**kwargs) - self.logging = kwargs.get('logging', None) - self.hour_metrics = kwargs.get('hour_metrics', None) - self.minute_metrics = kwargs.get('minute_metrics', None) - self.cors = kwargs.get('cors', None) - self.default_service_version = kwargs.get('default_service_version', None) - self.delete_retention_policy = kwargs.get('delete_retention_policy', None) - self.static_website = kwargs.get('static_website', None) - - -class StorageServiceStats(msrest.serialization.Model): - """Stats for the storage service. - - :param geo_replication: Geo-Replication information for the Secondary Storage Service. - :type geo_replication: ~azure.storage.blob.models.GeoReplication - """ - - _attribute_map = { - 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, - } - - def __init__( - self, - **kwargs - ): - super(StorageServiceStats, self).__init__(**kwargs) - self.geo_replication = kwargs.get('geo_replication', None) - - -class UserDelegationKey(msrest.serialization.Model): - """A user delegation key. - - All required parameters must be populated in order to send to Azure. - - :param signed_oid: Required. The Azure Active Directory object ID in GUID format. - :type signed_oid: str - :param signed_tid: Required. The Azure Active Directory tenant ID in GUID format. - :type signed_tid: str - :param signed_start: Required. The date-time the key is active. - :type signed_start: ~datetime.datetime - :param signed_expiry: Required. The date-time the key expires. - :type signed_expiry: ~datetime.datetime - :param signed_service: Required. Abbreviation of the Azure Storage service that accepts the - key. - :type signed_service: str - :param signed_version: Required. The service version that created the key. - :type signed_version: str - :param value: Required. The key as a base64 string. 
- :type value: str - """ - - _validation = { - 'signed_oid': {'required': True}, - 'signed_tid': {'required': True}, - 'signed_start': {'required': True}, - 'signed_expiry': {'required': True}, - 'signed_service': {'required': True}, - 'signed_version': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'signed_oid': {'key': 'SignedOid', 'type': 'str'}, - 'signed_tid': {'key': 'SignedTid', 'type': 'str'}, - 'signed_start': {'key': 'SignedStart', 'type': 'iso-8601'}, - 'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601'}, - 'signed_service': {'key': 'SignedService', 'type': 'str'}, - 'signed_version': {'key': 'SignedVersion', 'type': 'str'}, - 'value': {'key': 'Value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UserDelegationKey, self).__init__(**kwargs) - self.signed_oid = kwargs['signed_oid'] - self.signed_tid = kwargs['signed_tid'] - self.signed_start = kwargs['signed_start'] - self.signed_expiry = kwargs['signed_expiry'] - self.signed_service = kwargs['signed_service'] - self.signed_version = kwargs['signed_version'] - self.value = kwargs['value'] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models_py3.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models_py3.py index b1339f08fbdc..cd88cb20487f 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models_py3.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_models_py3.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. @@ -7,29 +8,37 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +import sys +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union -from azure.core.exceptions import HttpResponseError -import msrest.serialization +from .. import _serialization -from ._azure_blob_storage_enums import * +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from .. import models as _models +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class AccessPolicy(msrest.serialization.Model): + +class AccessPolicy(_serialization.Model): """An Access policy. - :param start: the date-time the policy is active. - :type start: str - :param expiry: the date-time the policy expires. - :type expiry: str - :param permission: the permissions for the acl policy. - :type permission: str + :ivar start: the date-time the policy is active. + :vartype start: str + :ivar expiry: the date-time the policy expires. + :vartype expiry: str + :ivar permission: the permissions for the acl policy. 
+ :vartype permission: str """ _attribute_map = { - 'start': {'key': 'Start', 'type': 'str'}, - 'expiry': {'key': 'Expiry', 'type': 'str'}, - 'permission': {'key': 'Permission', 'type': 'str'}, + "start": {"key": "Start", "type": "str"}, + "expiry": {"key": "Expiry", "type": "str"}, + "permission": {"key": "Permission", "type": "str"}, } def __init__( @@ -38,104 +47,117 @@ def __init__( start: Optional[str] = None, expiry: Optional[str] = None, permission: Optional[str] = None, - **kwargs - ): - super(AccessPolicy, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword start: the date-time the policy is active. + :paramtype start: str + :keyword expiry: the date-time the policy expires. + :paramtype expiry: str + :keyword permission: the permissions for the acl policy. + :paramtype permission: str + """ + super().__init__(**kwargs) self.start = start self.expiry = expiry self.permission = permission -class AppendPositionAccessConditions(msrest.serialization.Model): +class AppendPositionAccessConditions(_serialization.Model): """Parameter group. - :param max_size: Optional conditional header. The max length in bytes permitted for the append + :ivar max_size: Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). - :type max_size: long - :param append_position: Optional conditional header, used only for the Append Block operation. - A number indicating the byte offset to compare. Append Block will succeed only if the append + :vartype max_size: int + :ivar append_position: Optional conditional header, used only for the Append Block operation. A + number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). - :type append_position: long + :vartype append_position: int """ _attribute_map = { - 'max_size': {'key': 'maxSize', 'type': 'long'}, - 'append_position': {'key': 'appendPosition', 'type': 'long'}, - } - - def __init__( - self, - *, - max_size: Optional[int] = None, - append_position: Optional[int] = None, - **kwargs - ): - super(AppendPositionAccessConditions, self).__init__(**kwargs) + "max_size": {"key": "maxSize", "type": "int"}, + "append_position": {"key": "appendPosition", "type": "int"}, + } + + def __init__(self, *, max_size: Optional[int] = None, append_position: Optional[int] = None, **kwargs: Any) -> None: + """ + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :paramtype max_size: int + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). 
+ :paramtype append_position: int + """ + super().__init__(**kwargs) self.max_size = max_size self.append_position = append_position -class ArrowConfiguration(msrest.serialization.Model): - """arrow configuration. +class ArrowConfiguration(_serialization.Model): + """Groups the settings used for formatting the response if the response should be Arrow formatted. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param schema: Required. - :type schema: list[~azure.storage.blob.models.ArrowField] + :ivar schema: Required. + :vartype schema: list[~azure.storage.blob.models.ArrowField] """ _validation = { - 'schema': {'required': True}, + "schema": {"required": True}, } _attribute_map = { - 'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'wrapped': True, 'itemsName': 'Field'}}, - } - _xml_map = { - 'name': 'ArrowConfiguration' - } - - def __init__( - self, - *, - schema: List["ArrowField"], - **kwargs - ): - super(ArrowConfiguration, self).__init__(**kwargs) + "schema": { + "key": "Schema", + "type": "[ArrowField]", + "xml": {"name": "Schema", "wrapped": True, "itemsName": "Field"}, + }, + } + _xml_map = {"name": "ArrowConfiguration"} + + def __init__(self, *, schema: List["_models.ArrowField"], **kwargs: Any) -> None: + """ + :keyword schema: Required. + :paramtype schema: list[~azure.storage.blob.models.ArrowField] + """ + super().__init__(**kwargs) self.schema = schema -class ArrowField(msrest.serialization.Model): - """field of an arrow schema. +class ArrowField(_serialization.Model): + """Groups settings regarding specific field of an arrow schema. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param type: Required. - :type type: str - :param name: - :type name: str - :param precision: - :type precision: int - :param scale: - :type scale: int + :ivar type: Required. + :vartype type: str + :ivar name: + :vartype name: str + :ivar precision: + :vartype precision: int + :ivar scale: + :vartype scale: int """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'type': {'key': 'Type', 'type': 'str'}, - 'name': {'key': 'Name', 'type': 'str'}, - 'precision': {'key': 'Precision', 'type': 'int'}, - 'scale': {'key': 'Scale', 'type': 'int'}, - } - _xml_map = { - 'name': 'Field' + "type": {"key": "Type", "type": "str"}, + "name": {"key": "Name", "type": "str"}, + "precision": {"key": "Precision", "type": "int"}, + "scale": {"key": "Scale", "type": "int"}, } + _xml_map = {"name": "Field"} def __init__( self, @@ -144,109 +166,120 @@ def __init__( name: Optional[str] = None, precision: Optional[int] = None, scale: Optional[int] = None, - **kwargs - ): - super(ArrowField, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword type: Required. + :paramtype type: str + :keyword name: + :paramtype name: str + :keyword precision: + :paramtype precision: int + :keyword scale: + :paramtype scale: int + """ + super().__init__(**kwargs) self.type = type self.name = name self.precision = precision self.scale = scale -class BlobFlatListSegment(msrest.serialization.Model): +class BlobFlatListSegment(_serialization.Model): """BlobFlatListSegment. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param blob_items: Required. 
- :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + :ivar blob_items: Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] """ _validation = { - 'blob_items': {'required': True}, + "blob_items": {"required": True}, } _attribute_map = { - 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]'}, - } - _xml_map = { - 'name': 'Blobs' + "blob_items": {"key": "BlobItems", "type": "[BlobItemInternal]", "xml": {"itemsName": "Blob"}}, } + _xml_map = {"name": "Blobs"} - def __init__( - self, - *, - blob_items: List["BlobItemInternal"], - **kwargs - ): - super(BlobFlatListSegment, self).__init__(**kwargs) + def __init__(self, *, blob_items: List["_models.BlobItemInternal"], **kwargs: Any) -> None: + """ + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super().__init__(**kwargs) self.blob_items = blob_items -class BlobHierarchyListSegment(msrest.serialization.Model): +class BlobHierarchyListSegment(_serialization.Model): """BlobHierarchyListSegment. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param blob_prefixes: - :type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] - :param blob_items: Required. - :type blob_items: list[~azure.storage.blob.models.BlobItemInternal] + :ivar blob_prefixes: + :vartype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :ivar blob_items: Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] """ _validation = { - 'blob_items': {'required': True}, + "blob_items": {"required": True}, } _attribute_map = { - 'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix'}}, - 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}}, - } - _xml_map = { - 'name': 'Blobs' + "blob_prefixes": {"key": "BlobPrefixes", "type": "[BlobPrefix]", "xml": {"name": "BlobPrefix"}}, + "blob_items": {"key": "BlobItems", "type": "[BlobItemInternal]", "xml": {"name": "Blob", "itemsName": "Blob"}}, } + _xml_map = {"name": "Blobs"} def __init__( self, *, - blob_items: List["BlobItemInternal"], - blob_prefixes: Optional[List["BlobPrefix"]] = None, - **kwargs - ): - super(BlobHierarchyListSegment, self).__init__(**kwargs) + blob_items: List["_models.BlobItemInternal"], + blob_prefixes: Optional[List["_models.BlobPrefix"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword blob_prefixes: + :paramtype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super().__init__(**kwargs) self.blob_prefixes = blob_prefixes self.blob_items = blob_items -class BlobHTTPHeaders(msrest.serialization.Model): +class BlobHTTPHeaders(_serialization.Model): """Parameter group. - :param blob_cache_control: Optional. Sets the blob's cache control. If specified, this property + :ivar blob_cache_control: Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. - :type blob_cache_control: str - :param blob_content_type: Optional. Sets the blob's content type. If specified, this property - is stored with the blob and returned with a read request. - :type blob_content_type: str - :param blob_content_md5: Optional. An MD5 hash of the blob content. 
Note that this hash is not + :vartype blob_cache_control: str + :ivar blob_content_type: Optional. Sets the blob's content type. If specified, this property is + stored with the blob and returned with a read request. + :vartype blob_content_type: str + :ivar blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. - :type blob_content_md5: bytearray - :param blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + :vartype blob_content_md5: bytes + :ivar blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. - :type blob_content_encoding: str - :param blob_content_language: Optional. Set the blob's content language. If specified, this + :vartype blob_content_encoding: str + :ivar blob_content_language: Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. - :type blob_content_language: str - :param blob_content_disposition: Optional. Sets the blob's Content-Disposition header. - :type blob_content_disposition: str + :vartype blob_content_language: str + :ivar blob_content_disposition: Optional. Sets the blob's Content-Disposition header. + :vartype blob_content_disposition: str """ _attribute_map = { - 'blob_cache_control': {'key': 'blobCacheControl', 'type': 'str'}, - 'blob_content_type': {'key': 'blobContentType', 'type': 'str'}, - 'blob_content_md5': {'key': 'blobContentMD5', 'type': 'bytearray'}, - 'blob_content_encoding': {'key': 'blobContentEncoding', 'type': 'str'}, - 'blob_content_language': {'key': 'blobContentLanguage', 'type': 'str'}, - 'blob_content_disposition': {'key': 'blobContentDisposition', 'type': 'str'}, + "blob_cache_control": {"key": "blobCacheControl", "type": "str"}, + "blob_content_type": {"key": "blobContentType", "type": "str"}, + "blob_content_md5": {"key": "blobContentMD5", "type": "bytearray"}, + "blob_content_encoding": {"key": "blobContentEncoding", "type": "str"}, + "blob_content_language": {"key": "blobContentLanguage", "type": "str"}, + "blob_content_disposition": {"key": "blobContentDisposition", "type": "str"}, } def __init__( @@ -254,13 +287,32 @@ def __init__( *, blob_cache_control: Optional[str] = None, blob_content_type: Optional[str] = None, - blob_content_md5: Optional[bytearray] = None, + blob_content_md5: Optional[bytes] = None, blob_content_encoding: Optional[str] = None, blob_content_language: Optional[str] = None, blob_content_disposition: Optional[str] = None, - **kwargs - ): - super(BlobHTTPHeaders, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. + :paramtype blob_cache_control: str + :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. + :paramtype blob_content_type: str + :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were validated when each was uploaded. + :paramtype blob_content_md5: bytes + :keyword blob_content_encoding: Optional. Sets the blob's content encoding. 
If specified, this + property is stored with the blob and returned with a read request. + :paramtype blob_content_encoding: str + :keyword blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. + :paramtype blob_content_language: str + :keyword blob_content_disposition: Optional. Sets the blob's Content-Disposition header. + :paramtype blob_content_disposition: str + """ + super().__init__(**kwargs) self.blob_cache_control = blob_cache_control self.blob_content_type = blob_content_type self.blob_content_md5 = blob_content_md5 @@ -269,68 +321,92 @@ def __init__( self.blob_content_disposition = blob_content_disposition -class BlobItemInternal(msrest.serialization.Model): +class BlobItemInternal(_serialization.Model): """An Azure Storage blob. - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param deleted: Required. - :type deleted: bool - :param snapshot: Required. - :type snapshot: str - :param version_id: - :type version_id: str - :param is_current_version: - :type is_current_version: bool - :param properties: Required. Properties of a blob. - :type properties: ~azure.storage.blob.models.BlobPropertiesInternal - :param metadata: - :type metadata: ~azure.storage.blob.models.BlobMetadata - :param blob_tags: Blob tags. - :type blob_tags: ~azure.storage.blob.models.BlobTags - :param object_replication_metadata: Dictionary of :code:``. - :type object_replication_metadata: dict[str, str] + All required parameters must be populated in order to send to server. + + :ivar name: Required. + :vartype name: ~azure.storage.blob.models.BlobName + :ivar deleted: Required. + :vartype deleted: bool + :ivar snapshot: Required. + :vartype snapshot: str + :ivar version_id: + :vartype version_id: str + :ivar is_current_version: + :vartype is_current_version: bool + :ivar properties: Properties of a blob. Required. + :vartype properties: ~azure.storage.blob.models.BlobPropertiesInternal + :ivar metadata: + :vartype metadata: ~azure.storage.blob.models.BlobMetadata + :ivar blob_tags: Blob tags. + :vartype blob_tags: ~azure.storage.blob.models.BlobTags + :ivar has_versions_only: + :vartype has_versions_only: bool + :ivar object_replication_metadata: Dictionary of :code:``. 
+ :vartype object_replication_metadata: dict[str, str] """ _validation = { - 'name': {'required': True}, - 'deleted': {'required': True}, - 'snapshot': {'required': True}, - 'properties': {'required': True}, + "name": {"required": True}, + "deleted": {"required": True}, + "snapshot": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'deleted': {'key': 'Deleted', 'type': 'bool'}, - 'snapshot': {'key': 'Snapshot', 'type': 'str'}, - 'version_id': {'key': 'VersionId', 'type': 'str'}, - 'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool'}, - 'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal'}, - 'metadata': {'key': 'Metadata', 'type': 'BlobMetadata'}, - 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags'}, - 'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}'}, - } - _xml_map = { - 'name': 'Blob' - } + "name": {"key": "Name", "type": "BlobName"}, + "deleted": {"key": "Deleted", "type": "bool"}, + "snapshot": {"key": "Snapshot", "type": "str"}, + "version_id": {"key": "VersionId", "type": "str"}, + "is_current_version": {"key": "IsCurrentVersion", "type": "bool"}, + "properties": {"key": "Properties", "type": "BlobPropertiesInternal"}, + "metadata": {"key": "Metadata", "type": "BlobMetadata"}, + "blob_tags": {"key": "BlobTags", "type": "BlobTags"}, + "has_versions_only": {"key": "HasVersionsOnly", "type": "bool"}, + "object_replication_metadata": {"key": "OrMetadata", "type": "{str}"}, + } + _xml_map = {"name": "Blob"} def __init__( self, *, - name: str, + name: "_models.BlobName", deleted: bool, snapshot: str, - properties: "BlobPropertiesInternal", + properties: "_models.BlobPropertiesInternal", version_id: Optional[str] = None, is_current_version: Optional[bool] = None, - metadata: Optional["BlobMetadata"] = None, - blob_tags: Optional["BlobTags"] = None, + metadata: Optional["_models.BlobMetadata"] = None, + blob_tags: Optional["_models.BlobTags"] = None, + has_versions_only: Optional[bool] = None, object_replication_metadata: Optional[Dict[str, str]] = None, - **kwargs - ): - super(BlobItemInternal, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword name: Required. + :paramtype name: ~azure.storage.blob.models.BlobName + :keyword deleted: Required. + :paramtype deleted: bool + :keyword snapshot: Required. + :paramtype snapshot: str + :keyword version_id: + :paramtype version_id: str + :keyword is_current_version: + :paramtype is_current_version: bool + :keyword properties: Properties of a blob. Required. + :paramtype properties: ~azure.storage.blob.models.BlobPropertiesInternal + :keyword metadata: + :paramtype metadata: ~azure.storage.blob.models.BlobMetadata + :keyword blob_tags: Blob tags. + :paramtype blob_tags: ~azure.storage.blob.models.BlobTags + :keyword has_versions_only: + :paramtype has_versions_only: bool + :keyword object_replication_metadata: Dictionary of :code:``. + :paramtype object_replication_metadata: dict[str, str] + """ + super().__init__(**kwargs) self.name = name self.deleted = deleted self.snapshot = snapshot @@ -339,201 +415,235 @@ def __init__( self.properties = properties self.metadata = metadata self.blob_tags = blob_tags + self.has_versions_only = has_versions_only self.object_replication_metadata = object_replication_metadata -class BlobMetadata(msrest.serialization.Model): +class BlobMetadata(_serialization.Model): """BlobMetadata. 
- :param additional_properties: Unmatched properties from the message are deserialized to this + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, str] - :param encrypted: - :type encrypted: str + :vartype additional_properties: dict[str, str] + :ivar encrypted: + :vartype encrypted: str """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{str}'}, - 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'attr': True}}, - } - _xml_map = { - 'name': 'Metadata' + "additional_properties": {"key": "", "type": "{str}"}, + "encrypted": {"key": "Encrypted", "type": "str", "xml": {"attr": True}}, } + _xml_map = {"name": "Metadata"} def __init__( - self, - *, - additional_properties: Optional[Dict[str, str]] = None, - encrypted: Optional[str] = None, - **kwargs - ): - super(BlobMetadata, self).__init__(**kwargs) + self, *, additional_properties: Optional[Dict[str, str]] = None, encrypted: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, str] + :keyword encrypted: + :paramtype encrypted: str + """ + super().__init__(**kwargs) self.additional_properties = additional_properties self.encrypted = encrypted -class BlobPrefix(msrest.serialization.Model): +class BlobName(_serialization.Model): + """BlobName. + + :ivar encoded: Indicates if the blob name is encoded. + :vartype encoded: bool + :ivar content: The name of the blob. + :vartype content: str + """ + + _attribute_map = { + "encoded": {"key": "Encoded", "type": "bool", "xml": {"name": "Encoded", "attr": True}}, + "content": {"key": "content", "type": "str", "xml": {"text": True}}, + } + + def __init__(self, *, encoded: Optional[bool] = None, content: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword encoded: Indicates if the blob name is encoded. + :paramtype encoded: bool + :keyword content: The name of the blob. + :paramtype content: str + """ + super().__init__(**kwargs) + self.encoded = encoded + self.content = content + + +class BlobPrefix(_serialization.Model): """BlobPrefix. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param name: Required. - :type name: str + :ivar name: Required. + :vartype name: ~azure.storage.blob.models.BlobName """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, + "name": {"key": "Name", "type": "BlobName"}, } - def __init__( - self, - *, - name: str, - **kwargs - ): - super(BlobPrefix, self).__init__(**kwargs) + def __init__(self, *, name: "_models.BlobName", **kwargs: Any) -> None: + """ + :keyword name: Required. + :paramtype name: ~azure.storage.blob.models.BlobName + """ + super().__init__(**kwargs) self.name = name -class BlobPropertiesInternal(msrest.serialization.Model): +class BlobPropertiesInternal(_serialization.Model): # pylint: disable=too-many-instance-attributes """Properties of a blob. - All required parameters must be populated in order to send to Azure. - - :param creation_time: - :type creation_time: ~datetime.datetime - :param last_modified: Required. - :type last_modified: ~datetime.datetime - :param etag: Required. - :type etag: str - :param content_length: Size in bytes. 
- :type content_length: long - :param content_type: - :type content_type: str - :param content_encoding: - :type content_encoding: str - :param content_language: - :type content_language: str - :param content_md5: - :type content_md5: bytearray - :param content_disposition: - :type content_disposition: str - :param cache_control: - :type cache_control: str - :param blob_sequence_number: - :type blob_sequence_number: long - :param blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". - :type blob_type: str or ~azure.storage.blob.models.BlobType - :param lease_status: Possible values include: "locked", "unlocked". - :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + All required parameters must be populated in order to send to server. + + :ivar creation_time: + :vartype creation_time: ~datetime.datetime + :ivar last_modified: Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: Required. + :vartype etag: str + :ivar content_length: Size in bytes. + :vartype content_length: int + :ivar content_type: + :vartype content_type: str + :ivar content_encoding: + :vartype content_encoding: str + :ivar content_language: + :vartype content_language: str + :ivar content_md5: + :vartype content_md5: bytes + :ivar content_disposition: + :vartype content_disposition: str + :ivar cache_control: + :vartype cache_control: str + :ivar blob_sequence_number: + :vartype blob_sequence_number: int + :ivar blob_type: Known values are: "BlockBlob", "PageBlob", and "AppendBlob". + :vartype blob_type: str or ~azure.storage.blob.models.BlobType + :ivar lease_status: Known values are: "locked" and "unlocked". + :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :ivar lease_state: Known values are: "available", "leased", "expired", "breaking", and "broken". - :type lease_state: str or ~azure.storage.blob.models.LeaseStateType - :param lease_duration: Possible values include: "infinite", "fixed". - :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :param copy_id: - :type copy_id: str - :param copy_status: Possible values include: "pending", "success", "aborted", "failed". - :type copy_status: str or ~azure.storage.blob.models.CopyStatusType - :param copy_source: - :type copy_source: str - :param copy_progress: - :type copy_progress: str - :param copy_completion_time: - :type copy_completion_time: ~datetime.datetime - :param copy_status_description: - :type copy_status_description: str - :param server_encrypted: - :type server_encrypted: bool - :param incremental_copy: - :type incremental_copy: bool - :param destination_snapshot: - :type destination_snapshot: str - :param deleted_time: - :type deleted_time: ~datetime.datetime - :param remaining_retention_days: - :type remaining_retention_days: int - :param access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", - "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". - :type access_tier: str or ~azure.storage.blob.models.AccessTier - :param access_tier_inferred: - :type access_tier_inferred: bool - :param archive_status: Possible values include: "rehydrate-pending-to-hot", "rehydrate- - pending-to-cool". - :type archive_status: str or ~azure.storage.blob.models.ArchiveStatus - :param customer_provided_key_sha256: - :type customer_provided_key_sha256: str - :param encryption_scope: The name of the encryption scope under which the blob is encrypted. 
- :type encryption_scope: str - :param access_tier_change_time: - :type access_tier_change_time: ~datetime.datetime - :param tag_count: - :type tag_count: int - :param expires_on: - :type expires_on: ~datetime.datetime - :param is_sealed: - :type is_sealed: bool - :param rehydrate_priority: If an object is in rehydrate pending state then this header is - returned with priority of rehydrate. Valid values are High and Standard. Possible values - include: "High", "Standard". - :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority - :param last_accessed_on: - :type last_accessed_on: ~datetime.datetime + :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :ivar lease_duration: Known values are: "infinite" and "fixed". + :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :ivar copy_id: + :vartype copy_id: str + :ivar copy_status: Known values are: "pending", "success", "aborted", and "failed". + :vartype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :ivar copy_source: + :vartype copy_source: str + :ivar copy_progress: + :vartype copy_progress: str + :ivar copy_completion_time: + :vartype copy_completion_time: ~datetime.datetime + :ivar copy_status_description: + :vartype copy_status_description: str + :ivar server_encrypted: + :vartype server_encrypted: bool + :ivar incremental_copy: + :vartype incremental_copy: bool + :ivar destination_snapshot: + :vartype destination_snapshot: str + :ivar deleted_time: + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: + :vartype remaining_retention_days: int + :ivar access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", + "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". + :vartype access_tier: str or ~azure.storage.blob.models.AccessTier + :ivar access_tier_inferred: + :vartype access_tier_inferred: bool + :ivar archive_status: Known values are: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". + :vartype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :ivar customer_provided_key_sha256: + :vartype customer_provided_key_sha256: str + :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. + :vartype encryption_scope: str + :ivar access_tier_change_time: + :vartype access_tier_change_time: ~datetime.datetime + :ivar tag_count: + :vartype tag_count: int + :ivar expires_on: + :vartype expires_on: ~datetime.datetime + :ivar is_sealed: + :vartype is_sealed: bool + :ivar rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Known values are: + "High" and "Standard". + :vartype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :ivar last_accessed_on: + :vartype last_accessed_on: ~datetime.datetime + :ivar immutability_policy_expires_on: + :vartype immutability_policy_expires_on: ~datetime.datetime + :ivar immutability_policy_mode: Known values are: "Mutable", "Unlocked", and "Locked". 
+ :vartype immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :ivar legal_hold: + :vartype legal_hold: bool """ _validation = { - 'last_modified': {'required': True}, - 'etag': {'required': True}, + "last_modified": {"required": True}, + "etag": {"required": True}, } _attribute_map = { - 'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123'}, - 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, - 'etag': {'key': 'Etag', 'type': 'str'}, - 'content_length': {'key': 'Content-Length', 'type': 'long'}, - 'content_type': {'key': 'Content-Type', 'type': 'str'}, - 'content_encoding': {'key': 'Content-Encoding', 'type': 'str'}, - 'content_language': {'key': 'Content-Language', 'type': 'str'}, - 'content_md5': {'key': 'Content-MD5', 'type': 'bytearray'}, - 'content_disposition': {'key': 'Content-Disposition', 'type': 'str'}, - 'cache_control': {'key': 'Cache-Control', 'type': 'str'}, - 'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long'}, - 'blob_type': {'key': 'BlobType', 'type': 'str'}, - 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, - 'lease_state': {'key': 'LeaseState', 'type': 'str'}, - 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, - 'copy_id': {'key': 'CopyId', 'type': 'str'}, - 'copy_status': {'key': 'CopyStatus', 'type': 'str'}, - 'copy_source': {'key': 'CopySource', 'type': 'str'}, - 'copy_progress': {'key': 'CopyProgress', 'type': 'str'}, - 'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123'}, - 'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str'}, - 'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool'}, - 'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool'}, - 'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str'}, - 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, - 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, - 'access_tier': {'key': 'AccessTier', 'type': 'str'}, - 'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool'}, - 'archive_status': {'key': 'ArchiveStatus', 'type': 'str'}, - 'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str'}, - 'encryption_scope': {'key': 'EncryptionScope', 'type': 'str'}, - 'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123'}, - 'tag_count': {'key': 'TagCount', 'type': 'int'}, - 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123'}, - 'is_sealed': {'key': 'Sealed', 'type': 'bool'}, - 'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str'}, - 'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123'}, - } - _xml_map = { - 'name': 'Properties' - } - - def __init__( + "creation_time": {"key": "Creation-Time", "type": "rfc-1123"}, + "last_modified": {"key": "Last-Modified", "type": "rfc-1123"}, + "etag": {"key": "Etag", "type": "str"}, + "content_length": {"key": "Content-Length", "type": "int"}, + "content_type": {"key": "Content-Type", "type": "str"}, + "content_encoding": {"key": "Content-Encoding", "type": "str"}, + "content_language": {"key": "Content-Language", "type": "str"}, + "content_md5": {"key": "Content-MD5", "type": "bytearray"}, + "content_disposition": {"key": "Content-Disposition", "type": "str"}, + "cache_control": {"key": "Cache-Control", "type": "str"}, + "blob_sequence_number": {"key": "x-ms-blob-sequence-number", "type": "int"}, + "blob_type": {"key": "BlobType", "type": "str"}, + "lease_status": {"key": "LeaseStatus", "type": 
"str"}, + "lease_state": {"key": "LeaseState", "type": "str"}, + "lease_duration": {"key": "LeaseDuration", "type": "str"}, + "copy_id": {"key": "CopyId", "type": "str"}, + "copy_status": {"key": "CopyStatus", "type": "str"}, + "copy_source": {"key": "CopySource", "type": "str"}, + "copy_progress": {"key": "CopyProgress", "type": "str"}, + "copy_completion_time": {"key": "CopyCompletionTime", "type": "rfc-1123"}, + "copy_status_description": {"key": "CopyStatusDescription", "type": "str"}, + "server_encrypted": {"key": "ServerEncrypted", "type": "bool"}, + "incremental_copy": {"key": "IncrementalCopy", "type": "bool"}, + "destination_snapshot": {"key": "DestinationSnapshot", "type": "str"}, + "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"}, + "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"}, + "access_tier": {"key": "AccessTier", "type": "str"}, + "access_tier_inferred": {"key": "AccessTierInferred", "type": "bool"}, + "archive_status": {"key": "ArchiveStatus", "type": "str"}, + "customer_provided_key_sha256": {"key": "CustomerProvidedKeySha256", "type": "str"}, + "encryption_scope": {"key": "EncryptionScope", "type": "str"}, + "access_tier_change_time": {"key": "AccessTierChangeTime", "type": "rfc-1123"}, + "tag_count": {"key": "TagCount", "type": "int"}, + "expires_on": {"key": "Expiry-Time", "type": "rfc-1123"}, + "is_sealed": {"key": "Sealed", "type": "bool"}, + "rehydrate_priority": {"key": "RehydratePriority", "type": "str"}, + "last_accessed_on": {"key": "LastAccessTime", "type": "rfc-1123"}, + "immutability_policy_expires_on": {"key": "ImmutabilityPolicyUntilDate", "type": "rfc-1123"}, + "immutability_policy_mode": {"key": "ImmutabilityPolicyMode", "type": "str"}, + "legal_hold": {"key": "LegalHold", "type": "bool"}, + } + _xml_map = {"name": "Properties"} + + def __init__( # pylint: disable=too-many-locals self, *, last_modified: datetime.datetime, @@ -543,16 +653,16 @@ def __init__( content_type: Optional[str] = None, content_encoding: Optional[str] = None, content_language: Optional[str] = None, - content_md5: Optional[bytearray] = None, + content_md5: Optional[bytes] = None, content_disposition: Optional[str] = None, cache_control: Optional[str] = None, blob_sequence_number: Optional[int] = None, - blob_type: Optional[Union[str, "BlobType"]] = None, - lease_status: Optional[Union[str, "LeaseStatusType"]] = None, - lease_state: Optional[Union[str, "LeaseStateType"]] = None, - lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, + blob_type: Optional[Union[str, "_models.BlobType"]] = None, + lease_status: Optional[Union[str, "_models.LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "_models.LeaseStateType"]] = None, + lease_duration: Optional[Union[str, "_models.LeaseDurationType"]] = None, copy_id: Optional[str] = None, - copy_status: Optional[Union[str, "CopyStatusType"]] = None, + copy_status: Optional[Union[str, "_models.CopyStatusType"]] = None, copy_source: Optional[str] = None, copy_progress: Optional[str] = None, copy_completion_time: Optional[datetime.datetime] = None, @@ -562,20 +672,111 @@ def __init__( destination_snapshot: Optional[str] = None, deleted_time: Optional[datetime.datetime] = None, remaining_retention_days: Optional[int] = None, - access_tier: Optional[Union[str, "AccessTier"]] = None, + access_tier: Optional[Union[str, "_models.AccessTier"]] = None, access_tier_inferred: Optional[bool] = None, - archive_status: Optional[Union[str, "ArchiveStatus"]] = None, + archive_status: 
Optional[Union[str, "_models.ArchiveStatus"]] = None, customer_provided_key_sha256: Optional[str] = None, encryption_scope: Optional[str] = None, access_tier_change_time: Optional[datetime.datetime] = None, tag_count: Optional[int] = None, expires_on: Optional[datetime.datetime] = None, is_sealed: Optional[bool] = None, - rehydrate_priority: Optional[Union[str, "RehydratePriority"]] = None, + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, last_accessed_on: Optional[datetime.datetime] = None, - **kwargs - ): - super(BlobPropertiesInternal, self).__init__(**kwargs) + immutability_policy_expires_on: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword creation_time: + :paramtype creation_time: ~datetime.datetime + :keyword last_modified: Required. + :paramtype last_modified: ~datetime.datetime + :keyword etag: Required. + :paramtype etag: str + :keyword content_length: Size in bytes. + :paramtype content_length: int + :keyword content_type: + :paramtype content_type: str + :keyword content_encoding: + :paramtype content_encoding: str + :keyword content_language: + :paramtype content_language: str + :keyword content_md5: + :paramtype content_md5: bytes + :keyword content_disposition: + :paramtype content_disposition: str + :keyword cache_control: + :paramtype cache_control: str + :keyword blob_sequence_number: + :paramtype blob_sequence_number: int + :keyword blob_type: Known values are: "BlockBlob", "PageBlob", and "AppendBlob". + :paramtype blob_type: str or ~azure.storage.blob.models.BlobType + :keyword lease_status: Known values are: "locked" and "unlocked". + :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :keyword lease_state: Known values are: "available", "leased", "expired", "breaking", and + "broken". + :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :keyword lease_duration: Known values are: "infinite" and "fixed". + :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :keyword copy_id: + :paramtype copy_id: str + :keyword copy_status: Known values are: "pending", "success", "aborted", and "failed". + :paramtype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :keyword copy_source: + :paramtype copy_source: str + :keyword copy_progress: + :paramtype copy_progress: str + :keyword copy_completion_time: + :paramtype copy_completion_time: ~datetime.datetime + :keyword copy_status_description: + :paramtype copy_status_description: str + :keyword server_encrypted: + :paramtype server_encrypted: bool + :keyword incremental_copy: + :paramtype incremental_copy: bool + :keyword destination_snapshot: + :paramtype destination_snapshot: str + :keyword deleted_time: + :paramtype deleted_time: ~datetime.datetime + :keyword remaining_retention_days: + :paramtype remaining_retention_days: int + :keyword access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", + "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". + :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier + :keyword access_tier_inferred: + :paramtype access_tier_inferred: bool + :keyword archive_status: Known values are: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". 
+ :paramtype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :keyword customer_provided_key_sha256: + :paramtype customer_provided_key_sha256: str + :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. + :paramtype encryption_scope: str + :keyword access_tier_change_time: + :paramtype access_tier_change_time: ~datetime.datetime + :keyword tag_count: + :paramtype tag_count: int + :keyword expires_on: + :paramtype expires_on: ~datetime.datetime + :keyword is_sealed: + :paramtype is_sealed: bool + :keyword rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Known values are: + "High" and "Standard". + :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :keyword last_accessed_on: + :paramtype last_accessed_on: ~datetime.datetime + :keyword immutability_policy_expires_on: + :paramtype immutability_policy_expires_on: ~datetime.datetime + :keyword immutability_policy_mode: Known values are: "Mutable", "Unlocked", and "Locked". + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: + :paramtype legal_hold: bool + """ + super().__init__(**kwargs) self.creation_time = creation_time self.last_modified = last_modified self.etag = etag @@ -613,152 +814,158 @@ def __init__( self.is_sealed = is_sealed self.rehydrate_priority = rehydrate_priority self.last_accessed_on = last_accessed_on + self.immutability_policy_expires_on = immutability_policy_expires_on + self.immutability_policy_mode = immutability_policy_mode + self.legal_hold = legal_hold -class BlobTag(msrest.serialization.Model): +class BlobTag(_serialization.Model): """BlobTag. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param key: Required. - :type key: str - :param value: Required. - :type value: str + :ivar key: Required. + :vartype key: str + :ivar value: Required. + :vartype value: str """ _validation = { - 'key': {'required': True}, - 'value': {'required': True}, + "key": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'key': {'key': 'Key', 'type': 'str'}, - 'value': {'key': 'Value', 'type': 'str'}, - } - _xml_map = { - 'name': 'Tag' - } - - def __init__( - self, - *, - key: str, - value: str, - **kwargs - ): - super(BlobTag, self).__init__(**kwargs) + "key": {"key": "Key", "type": "str"}, + "value": {"key": "Value", "type": "str"}, + } + _xml_map = {"name": "Tag"} + + def __init__(self, *, key: str, value: str, **kwargs: Any) -> None: + """ + :keyword key: Required. + :paramtype key: str + :keyword value: Required. + :paramtype value: str + """ + super().__init__(**kwargs) self.key = key self.value = value -class BlobTags(msrest.serialization.Model): +class BlobTags(_serialization.Model): """Blob tags. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param blob_tag_set: Required. - :type blob_tag_set: list[~azure.storage.blob.models.BlobTag] + :ivar blob_tag_set: Required. 
+ :vartype blob_tag_set: list[~azure.storage.blob.models.BlobTag] """ _validation = { - 'blob_tag_set': {'required': True}, + "blob_tag_set": {"required": True}, } _attribute_map = { - 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, - } - _xml_map = { - 'name': 'Tags' - } - - def __init__( - self, - *, - blob_tag_set: List["BlobTag"], - **kwargs - ): - super(BlobTags, self).__init__(**kwargs) + "blob_tag_set": { + "key": "BlobTagSet", + "type": "[BlobTag]", + "xml": {"name": "TagSet", "wrapped": True, "itemsName": "Tag"}, + }, + } + _xml_map = {"name": "Tags"} + + def __init__(self, *, blob_tag_set: List["_models.BlobTag"], **kwargs: Any) -> None: + """ + :keyword blob_tag_set: Required. + :paramtype blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + super().__init__(**kwargs) self.blob_tag_set = blob_tag_set -class Block(msrest.serialization.Model): +class Block(_serialization.Model): """Represents a single block in a block blob. It describes the block's ID and size. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param name: Required. The base64 encoded block ID. - :type name: str - :param size: Required. The block size in bytes. - :type size: int + :ivar name: The base64 encoded block ID. Required. + :vartype name: str + :ivar size: The block size in bytes. Required. + :vartype size: int """ _validation = { - 'name': {'required': True}, - 'size': {'required': True}, + "name": {"required": True}, + "size": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'size': {'key': 'Size', 'type': 'int'}, - } - - def __init__( - self, - *, - name: str, - size: int, - **kwargs - ): - super(Block, self).__init__(**kwargs) + "name": {"key": "Name", "type": "str"}, + "size": {"key": "Size", "type": "int"}, + } + + def __init__(self, *, name: str, size: int, **kwargs: Any) -> None: + """ + :keyword name: The base64 encoded block ID. Required. + :paramtype name: str + :keyword size: The block size in bytes. Required. + :paramtype size: int + """ + super().__init__(**kwargs) self.name = name self.size = size -class BlockList(msrest.serialization.Model): +class BlockList(_serialization.Model): """BlockList. 
- :param committed_blocks: - :type committed_blocks: list[~azure.storage.blob.models.Block] - :param uncommitted_blocks: - :type uncommitted_blocks: list[~azure.storage.blob.models.Block] + :ivar committed_blocks: + :vartype committed_blocks: list[~azure.storage.blob.models.Block] + :ivar uncommitted_blocks: + :vartype uncommitted_blocks: list[~azure.storage.blob.models.Block] """ _attribute_map = { - 'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, - 'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + "committed_blocks": {"key": "CommittedBlocks", "type": "[Block]", "xml": {"wrapped": True}}, + "uncommitted_blocks": {"key": "UncommittedBlocks", "type": "[Block]", "xml": {"wrapped": True}}, } def __init__( self, *, - committed_blocks: Optional[List["Block"]] = None, - uncommitted_blocks: Optional[List["Block"]] = None, - **kwargs - ): - super(BlockList, self).__init__(**kwargs) + committed_blocks: Optional[List["_models.Block"]] = None, + uncommitted_blocks: Optional[List["_models.Block"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword committed_blocks: + :paramtype committed_blocks: list[~azure.storage.blob.models.Block] + :keyword uncommitted_blocks: + :paramtype uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + super().__init__(**kwargs) self.committed_blocks = committed_blocks self.uncommitted_blocks = uncommitted_blocks -class BlockLookupList(msrest.serialization.Model): +class BlockLookupList(_serialization.Model): """BlockLookupList. - :param committed: - :type committed: list[str] - :param uncommitted: - :type uncommitted: list[str] - :param latest: - :type latest: list[str] + :ivar committed: + :vartype committed: list[str] + :ivar uncommitted: + :vartype uncommitted: list[str] + :ivar latest: + :vartype latest: list[str] """ _attribute_map = { - 'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'itemsName': 'Committed'}}, - 'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'itemsName': 'Uncommitted'}}, - 'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'itemsName': 'Latest'}}, - } - _xml_map = { - 'name': 'BlockList' + "committed": {"key": "Committed", "type": "[str]", "xml": {"itemsName": "Committed"}}, + "uncommitted": {"key": "Uncommitted", "type": "[str]", "xml": {"itemsName": "Uncommitted"}}, + "latest": {"key": "Latest", "type": "[str]", "xml": {"itemsName": "Latest"}}, } + _xml_map = {"name": "BlockList"} def __init__( self, @@ -766,65 +973,71 @@ def __init__( committed: Optional[List[str]] = None, uncommitted: Optional[List[str]] = None, latest: Optional[List[str]] = None, - **kwargs - ): - super(BlockLookupList, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword committed: + :paramtype committed: list[str] + :keyword uncommitted: + :paramtype uncommitted: list[str] + :keyword latest: + :paramtype latest: list[str] + """ + super().__init__(**kwargs) self.committed = committed self.uncommitted = uncommitted self.latest = latest -class ClearRange(msrest.serialization.Model): +class ClearRange(_serialization.Model): """ClearRange. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param start: Required. - :type start: long - :param end: Required. - :type end: long + :ivar start: Required. + :vartype start: int + :ivar end: Required. 
+ :vartype end: int """ _validation = { - 'start': {'required': True}, - 'end': {'required': True}, + "start": {"required": True}, + "end": {"required": True}, } _attribute_map = { - 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, - 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, - } - _xml_map = { - 'name': 'ClearRange' - } - - def __init__( - self, - *, - start: int, - end: int, - **kwargs - ): - super(ClearRange, self).__init__(**kwargs) + "start": {"key": "Start", "type": "int", "xml": {"name": "Start"}}, + "end": {"key": "End", "type": "int", "xml": {"name": "End"}}, + } + _xml_map = {"name": "ClearRange"} + + def __init__(self, *, start: int, end: int, **kwargs: Any) -> None: + """ + :keyword start: Required. + :paramtype start: int + :keyword end: Required. + :paramtype end: int + """ + super().__init__(**kwargs) self.start = start self.end = end -class ContainerCpkScopeInfo(msrest.serialization.Model): +class ContainerCpkScopeInfo(_serialization.Model): """Parameter group. - :param default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the - default encryption scope to set on the container and use for all future writes. - :type default_encryption_scope: str - :param prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, + :ivar default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the default + encryption scope to set on the container and use for all future writes. + :vartype default_encryption_scope: str + :ivar prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. - :type prevent_encryption_scope_override: bool + :vartype prevent_encryption_scope_override: bool """ _attribute_map = { - 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, - 'prevent_encryption_scope_override': {'key': 'PreventEncryptionScopeOverride', 'type': 'bool'}, + "default_encryption_scope": {"key": "DefaultEncryptionScope", "type": "str"}, + "prevent_encryption_scope_override": {"key": "PreventEncryptionScopeOverride", "type": "bool"}, } def __init__( @@ -832,57 +1045,76 @@ def __init__( *, default_encryption_scope: Optional[str] = None, prevent_encryption_scope_override: Optional[bool] = None, - **kwargs - ): - super(ContainerCpkScopeInfo, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the + default encryption scope to set on the container and use for all future writes. + :paramtype default_encryption_scope: str + :keyword prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, + prevents any request from specifying a different encryption scope than the scope set on the + container. + :paramtype prevent_encryption_scope_override: bool + """ + super().__init__(**kwargs) self.default_encryption_scope = default_encryption_scope self.prevent_encryption_scope_override = prevent_encryption_scope_override -class ContainerItem(msrest.serialization.Model): +class ContainerItem(_serialization.Model): """An Azure Storage container. - All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param deleted: - :type deleted: bool - :param version: - :type version: str - :param properties: Required. Properties of a container. 
- :type properties: ~azure.storage.blob.models.ContainerProperties - :param metadata: Dictionary of :code:`<string>`. - :type metadata: dict[str, str] + All required parameters must be populated in order to send to server. + + :ivar name: Required. + :vartype name: str + :ivar deleted: + :vartype deleted: bool + :ivar version: + :vartype version: str + :ivar properties: Properties of a container. Required. + :vartype properties: ~azure.storage.blob.models.ContainerProperties + :ivar metadata: Dictionary of :code:`<string>`. + :vartype metadata: dict[str, str] """ _validation = { - 'name': {'required': True}, - 'properties': {'required': True}, + "name": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'deleted': {'key': 'Deleted', 'type': 'bool'}, - 'version': {'key': 'Version', 'type': 'str'}, - 'properties': {'key': 'Properties', 'type': 'ContainerProperties'}, - 'metadata': {'key': 'Metadata', 'type': '{str}'}, - } - _xml_map = { - 'name': 'Container' + "name": {"key": "Name", "type": "str"}, + "deleted": {"key": "Deleted", "type": "bool"}, + "version": {"key": "Version", "type": "str"}, + "properties": {"key": "Properties", "type": "ContainerProperties"}, + "metadata": {"key": "Metadata", "type": "{str}"}, } + _xml_map = {"name": "Container"} def __init__( self, *, name: str, - properties: "ContainerProperties", + properties: "_models.ContainerProperties", deleted: Optional[bool] = None, version: Optional[str] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs - ): - super(ContainerItem, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword name: Required. + :paramtype name: str + :keyword deleted: + :paramtype deleted: bool + :keyword version: + :paramtype version: str + :keyword properties: Properties of a container. Required. + :paramtype properties: ~azure.storage.blob.models.ContainerProperties + :keyword metadata: Dictionary of :code:`<string>`. + :paramtype metadata: dict[str, str] + """ + super().__init__(**kwargs) self.name = name self.deleted = deleted self.version = version @@ -890,56 +1122,63 @@ def __init__( self.metadata = metadata -class ContainerProperties(msrest.serialization.Model): +class ContainerProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes """Properties of a container. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param last_modified: Required. - :type last_modified: ~datetime.datetime - :param etag: Required. - :type etag: str - :param lease_status: Possible values include: "locked", "unlocked". - :type lease_status: str or ~azure.storage.blob.models.LeaseStatusType - :param lease_state: Possible values include: "available", "leased", "expired", "breaking", + :ivar last_modified: Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: Required. + :vartype etag: str + :ivar lease_status: Known values are: "locked" and "unlocked". + :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :ivar lease_state: Known values are: "available", "leased", "expired", "breaking", and "broken". - :type lease_state: str or ~azure.storage.blob.models.LeaseStateType - :param lease_duration: Possible values include: "infinite", "fixed". - :type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType - :param public_access: Possible values include: "container", "blob". 
- :type public_access: str or ~azure.storage.blob.models.PublicAccessType - :param has_immutability_policy: - :type has_immutability_policy: bool - :param has_legal_hold: - :type has_legal_hold: bool - :param default_encryption_scope: - :type default_encryption_scope: str - :param prevent_encryption_scope_override: - :type prevent_encryption_scope_override: bool - :param deleted_time: - :type deleted_time: ~datetime.datetime - :param remaining_retention_days: - :type remaining_retention_days: int + :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :ivar lease_duration: Known values are: "infinite" and "fixed". + :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :ivar public_access: Known values are: "container" and "blob". + :vartype public_access: str or ~azure.storage.blob.models.PublicAccessType + :ivar has_immutability_policy: + :vartype has_immutability_policy: bool + :ivar has_legal_hold: + :vartype has_legal_hold: bool + :ivar default_encryption_scope: + :vartype default_encryption_scope: str + :ivar prevent_encryption_scope_override: + :vartype prevent_encryption_scope_override: bool + :ivar deleted_time: + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: + :vartype remaining_retention_days: int + :ivar is_immutable_storage_with_versioning_enabled: Indicates if version level worm is enabled + on this container. + :vartype is_immutable_storage_with_versioning_enabled: bool """ _validation = { - 'last_modified': {'required': True}, - 'etag': {'required': True}, + "last_modified": {"required": True}, + "etag": {"required": True}, } _attribute_map = { - 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, - 'etag': {'key': 'Etag', 'type': 'str'}, - 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, - 'lease_state': {'key': 'LeaseState', 'type': 'str'}, - 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, - 'public_access': {'key': 'PublicAccess', 'type': 'str'}, - 'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool'}, - 'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool'}, - 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, - 'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool'}, - 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, - 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + "last_modified": {"key": "Last-Modified", "type": "rfc-1123"}, + "etag": {"key": "Etag", "type": "str"}, + "lease_status": {"key": "LeaseStatus", "type": "str"}, + "lease_state": {"key": "LeaseState", "type": "str"}, + "lease_duration": {"key": "LeaseDuration", "type": "str"}, + "public_access": {"key": "PublicAccess", "type": "str"}, + "has_immutability_policy": {"key": "HasImmutabilityPolicy", "type": "bool"}, + "has_legal_hold": {"key": "HasLegalHold", "type": "bool"}, + "default_encryption_scope": {"key": "DefaultEncryptionScope", "type": "str"}, + "prevent_encryption_scope_override": {"key": "DenyEncryptionScopeOverride", "type": "bool"}, + "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"}, + "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"}, + "is_immutable_storage_with_versioning_enabled": { + "key": "ImmutableStorageWithVersioningEnabled", + "type": "bool", + }, } def __init__( @@ -947,19 +1186,50 @@ def __init__( *, last_modified: datetime.datetime, etag: str, - lease_status: Optional[Union[str, "LeaseStatusType"]] = None, - 
lease_state: Optional[Union[str, "LeaseStateType"]] = None, - lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, - public_access: Optional[Union[str, "PublicAccessType"]] = None, + lease_status: Optional[Union[str, "_models.LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "_models.LeaseStateType"]] = None, + lease_duration: Optional[Union[str, "_models.LeaseDurationType"]] = None, + public_access: Optional[Union[str, "_models.PublicAccessType"]] = None, has_immutability_policy: Optional[bool] = None, has_legal_hold: Optional[bool] = None, default_encryption_scope: Optional[str] = None, prevent_encryption_scope_override: Optional[bool] = None, deleted_time: Optional[datetime.datetime] = None, remaining_retention_days: Optional[int] = None, - **kwargs - ): - super(ContainerProperties, self).__init__(**kwargs) + is_immutable_storage_with_versioning_enabled: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword last_modified: Required. + :paramtype last_modified: ~datetime.datetime + :keyword etag: Required. + :paramtype etag: str + :keyword lease_status: Known values are: "locked" and "unlocked". + :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :keyword lease_state: Known values are: "available", "leased", "expired", "breaking", and + "broken". + :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :keyword lease_duration: Known values are: "infinite" and "fixed". + :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :keyword public_access: Known values are: "container" and "blob". + :paramtype public_access: str or ~azure.storage.blob.models.PublicAccessType + :keyword has_immutability_policy: + :paramtype has_immutability_policy: bool + :keyword has_legal_hold: + :paramtype has_legal_hold: bool + :keyword default_encryption_scope: + :paramtype default_encryption_scope: str + :keyword prevent_encryption_scope_override: + :paramtype prevent_encryption_scope_override: bool + :keyword deleted_time: + :paramtype deleted_time: ~datetime.datetime + :keyword remaining_retention_days: + :paramtype remaining_retention_days: int + :keyword is_immutable_storage_with_versioning_enabled: Indicates if version level worm is + enabled on this container. + :paramtype is_immutable_storage_with_versioning_enabled: bool + """ + super().__init__(**kwargs) self.last_modified = last_modified self.etag = etag self.lease_status = lease_status @@ -972,47 +1242,51 @@ def __init__( self.prevent_encryption_scope_override = prevent_encryption_scope_override self.deleted_time = deleted_time self.remaining_retention_days = remaining_retention_days - - -class CorsRule(msrest.serialization.Model): - """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain. - - All required parameters must be populated in order to send to Azure. - - :param allowed_origins: Required. The origin domains that are permitted to make a request - against the storage service via CORS. The origin domain is the domain from which the request - originates. Note that the origin must be an exact case-sensitive match with the origin that the - user age sends to the service. 
You can also use the wildcard character '*' to allow all origin - domains to make requests via CORS. - :type allowed_origins: str - :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may - use for a CORS request. (comma separated). - :type allowed_methods: str - :param allowed_headers: Required. the request headers that the origin domain may specify on the - CORS request. - :type allowed_headers: str - :param exposed_headers: Required. The response headers that may be sent in the response to the - CORS request and exposed by the browser to the request issuer. - :type exposed_headers: str - :param max_age_in_seconds: Required. The maximum amount time that a browser should cache the - preflight OPTIONS request. - :type max_age_in_seconds: int + self.is_immutable_storage_with_versioning_enabled = is_immutable_storage_with_versioning_enabled + + +class CorsRule(_serialization.Model): + """CORS is an HTTP feature that enables a web application running under one domain to access + resources in another domain. Web browsers implement a security restriction known as same-origin + policy that prevents a web page from calling APIs in a different domain; CORS provides a secure + way to allow one domain (the origin domain) to call APIs in another domain. + + All required parameters must be populated in order to send to server. + + :ivar allowed_origins: The origin domains that are permitted to make a request against the + storage service via CORS. The origin domain is the domain from which the request originates. + Note that the origin must be an exact case-sensitive match with the origin that the user agent + sends to the service. You can also use the wildcard character '*' to allow all origin domains + to make requests via CORS. Required. + :vartype allowed_origins: str + :ivar allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a + CORS request. (comma separated). Required. + :vartype allowed_methods: str + :ivar allowed_headers: The request headers that the origin domain may specify on the CORS + request. Required. + :vartype allowed_headers: str + :ivar exposed_headers: The response headers that may be sent in the response to the CORS + request and exposed by the browser to the request issuer. Required. + :vartype exposed_headers: str + :ivar max_age_in_seconds: The maximum amount of time that a browser should cache the preflight + OPTIONS request. Required. 
+ :vartype max_age_in_seconds: int """ _validation = { - 'allowed_origins': {'required': True}, - 'allowed_methods': {'required': True}, - 'allowed_headers': {'required': True}, - 'exposed_headers': {'required': True}, - 'max_age_in_seconds': {'required': True, 'minimum': 0}, + "allowed_origins": {"required": True}, + "allowed_methods": {"required": True}, + "allowed_headers": {"required": True}, + "exposed_headers": {"required": True}, + "max_age_in_seconds": {"required": True, "minimum": 0}, } _attribute_map = { - 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'}, - 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'}, - 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'}, - 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'}, - 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'}, + "allowed_origins": {"key": "AllowedOrigins", "type": "str"}, + "allowed_methods": {"key": "AllowedMethods", "type": "str"}, + "allowed_headers": {"key": "AllowedHeaders", "type": "str"}, + "exposed_headers": {"key": "ExposedHeaders", "type": "str"}, + "max_age_in_seconds": {"key": "MaxAgeInSeconds", "type": "int"}, } def __init__( @@ -1023,9 +1297,29 @@ def __init__( allowed_headers: str, exposed_headers: str, max_age_in_seconds: int, - **kwargs - ): - super(CorsRule, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword allowed_origins: The origin domains that are permitted to make a request against the + storage service via CORS. The origin domain is the domain from which the request originates. + Note that the origin must be an exact case-sensitive match with the origin that the user agent + sends to the service. You can also use the wildcard character '*' to allow all origin domains + to make requests via CORS. Required. + :paramtype allowed_origins: str + :keyword allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a + CORS request. (comma separated). Required. + :paramtype allowed_methods: str + :keyword allowed_headers: The request headers that the origin domain may specify on the CORS + request. Required. + :paramtype allowed_headers: str + :keyword exposed_headers: The response headers that may be sent in the response to the CORS + request and exposed by the browser to the request issuer. Required. + :paramtype exposed_headers: str + :keyword max_age_in_seconds: The maximum amount of time that a browser should cache the preflight + OPTIONS request. Required. + :paramtype max_age_in_seconds: int + """ + super().__init__(**kwargs) self.allowed_origins = allowed_origins self.allowed_methods = allowed_methods self.allowed_headers = allowed_headers @@ -1033,21 +1327,26 @@ def __init__( self.max_age_in_seconds = max_age_in_seconds -class CpkInfo(msrest.serialization.Model): +class CpkInfo(_serialization.Model): """Parameter group. - :param encryption_key: Optional. Specifies the encryption key to use to encrypt the data + :ivar encryption_key: Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. - :type encryption_key: str - :param encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided + :vartype encryption_key: str + :ivar encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. 
- :type encryption_key_sha256: str + :vartype encryption_key_sha256: str + :ivar encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, + the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is + provided. Known values are: "None" and "AES256". + :vartype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType """ _attribute_map = { - 'encryption_key': {'key': 'encryptionKey', 'type': 'str'}, - 'encryption_key_sha256': {'key': 'encryptionKeySha256', 'type': 'str'}, + "encryption_key": {"key": "encryptionKey", "type": "str"}, + "encryption_key_sha256": {"key": "encryptionKeySha256", "type": "str"}, + "encryption_algorithm": {"key": "encryptionAlgorithm", "type": "str"}, } def __init__( @@ -1055,131 +1354,102 @@ def __init__( *, encryption_key: Optional[str] = None, encryption_key_sha256: Optional[str] = None, - **kwargs - ): - super(CpkInfo, self).__init__(**kwargs) + encryption_algorithm: Optional[Union[str, "_models.EncryptionAlgorithmType"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data + provided in the request. If not specified, encryption is performed with the root account + encryption key. For more information, see Encryption at Rest for Azure Storage Services. + :paramtype encryption_key: str + :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be + provided if the x-ms-encryption-key header is provided. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. + Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key + header is provided. Known values are: "None" and "AES256". + :paramtype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType + """ + super().__init__(**kwargs) self.encryption_key = encryption_key self.encryption_key_sha256 = encryption_key_sha256 + self.encryption_algorithm = encryption_algorithm -class CpkScopeInfo(msrest.serialization.Model): +class CpkScopeInfo(_serialization.Model): """Parameter group. - :param encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + :ivar encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. - :type encryption_scope: str + :vartype encryption_scope: str """ _attribute_map = { - 'encryption_scope': {'key': 'encryptionScope', 'type': 'str'}, - } - - def __init__( - self, - *, - encryption_scope: Optional[str] = None, - **kwargs - ): - super(CpkScopeInfo, self).__init__(**kwargs) + "encryption_scope": {"key": "encryptionScope", "type": "str"}, + } + + def __init__(self, *, encryption_scope: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + encryption scope to use to encrypt the data provided in the request. If not specified, + encryption is performed with the default account encryption scope. For more information, see + Encryption at Rest for Azure Storage Services. 
+ :paramtype encryption_scope: str + """ + super().__init__(**kwargs) self.encryption_scope = encryption_scope -class DataLakeStorageError(msrest.serialization.Model): - """DataLakeStorageError. - - :param data_lake_storage_error_details: The service error response object. - :type data_lake_storage_error_details: ~azure.storage.blob.models.DataLakeStorageErrorDetails - """ - - _attribute_map = { - 'data_lake_storage_error_details': {'key': 'error', 'type': 'DataLakeStorageErrorDetails'}, - } - - def __init__( - self, - *, - data_lake_storage_error_details: Optional["DataLakeStorageErrorDetails"] = None, - **kwargs - ): - super(DataLakeStorageError, self).__init__(**kwargs) - self.data_lake_storage_error_details = data_lake_storage_error_details - - -class DataLakeStorageErrorDetails(msrest.serialization.Model): - """The service error response object. - - :param code: The service error code. - :type code: str - :param message: The service error message. - :type message: str +class DelimitedTextConfiguration(_serialization.Model): + """Groups the settings used for interpreting the blob data if the blob is delimited text + formatted. + + :ivar column_separator: The string used to separate columns. + :vartype column_separator: str + :ivar field_quote: The string used to quote a specific field. + :vartype field_quote: str + :ivar record_separator: The string used to separate records. + :vartype record_separator: str + :ivar escape_char: The string used as an escape character. + :vartype escape_char: str + :ivar headers_present: Represents whether the data has headers. + :vartype headers_present: bool """ _attribute_map = { - 'code': {'key': 'Code', 'type': 'str'}, - 'message': {'key': 'Message', 'type': 'str'}, + "column_separator": {"key": "ColumnSeparator", "type": "str", "xml": {"name": "ColumnSeparator"}}, + "field_quote": {"key": "FieldQuote", "type": "str", "xml": {"name": "FieldQuote"}}, + "record_separator": {"key": "RecordSeparator", "type": "str", "xml": {"name": "RecordSeparator"}}, + "escape_char": {"key": "EscapeChar", "type": "str", "xml": {"name": "EscapeChar"}}, + "headers_present": {"key": "HeadersPresent", "type": "bool", "xml": {"name": "HasHeaders"}}, } + _xml_map = {"name": "DelimitedTextConfiguration"} def __init__( self, *, - code: Optional[str] = None, - message: Optional[str] = None, - **kwargs - ): - super(DataLakeStorageErrorDetails, self).__init__(**kwargs) - self.code = code - self.message = message - - -class DelimitedTextConfiguration(msrest.serialization.Model): - """delimited text configuration. - - All required parameters must be populated in order to send to Azure. - - :param column_separator: Required. column separator. - :type column_separator: str - :param field_quote: Required. field quote. - :type field_quote: str - :param record_separator: Required. record separator. - :type record_separator: str - :param escape_char: Required. escape char. - :type escape_char: str - :param headers_present: Required. has headers. 
- :type headers_present: bool - """ - - _validation = { - 'column_separator': {'required': True}, - 'field_quote': {'required': True}, - 'record_separator': {'required': True}, - 'escape_char': {'required': True}, - 'headers_present': {'required': True}, - } - - _attribute_map = { - 'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}}, - 'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}}, - 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, - 'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}}, - 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}}, - } - _xml_map = { - 'name': 'DelimitedTextConfiguration' - } - - def __init__( - self, - *, - column_separator: str, - field_quote: str, - record_separator: str, - escape_char: str, - headers_present: bool, - **kwargs - ): - super(DelimitedTextConfiguration, self).__init__(**kwargs) + column_separator: Optional[str] = None, + field_quote: Optional[str] = None, + record_separator: Optional[str] = None, + escape_char: Optional[str] = None, + headers_present: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword column_separator: The string used to separate columns. + :paramtype column_separator: str + :keyword field_quote: The string used to quote a specific field. + :paramtype field_quote: str + :keyword record_separator: The string used to separate records. + :paramtype record_separator: str + :keyword escape_char: The string used as an escape character. + :paramtype escape_char: str + :keyword headers_present: Represents whether the data has headers. + :paramtype headers_present: bool + """ + super().__init__(**kwargs) self.column_separator = column_separator self.field_quote = field_quote self.record_separator = record_separator @@ -1187,309 +1457,314 @@ def __init__( self.headers_present = headers_present -class DirectoryHttpHeaders(msrest.serialization.Model): - """Parameter group. - - :param cache_control: Cache control for given resource. - :type cache_control: str - :param content_type: Content type for given resource. - :type content_type: str - :param content_encoding: Content encoding for given resource. - :type content_encoding: str - :param content_language: Content language for given resource. - :type content_language: str - :param content_disposition: Content disposition for given resource. - :type content_disposition: str - """ - - _attribute_map = { - 'cache_control': {'key': 'cacheControl', 'type': 'str'}, - 'content_type': {'key': 'contentType', 'type': 'str'}, - 'content_encoding': {'key': 'contentEncoding', 'type': 'str'}, - 'content_language': {'key': 'contentLanguage', 'type': 'str'}, - 'content_disposition': {'key': 'contentDisposition', 'type': 'str'}, - } - - def __init__( - self, - *, - cache_control: Optional[str] = None, - content_type: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_disposition: Optional[str] = None, - **kwargs - ): - super(DirectoryHttpHeaders, self).__init__(**kwargs) - self.cache_control = cache_control - self.content_type = content_type - self.content_encoding = content_encoding - self.content_language = content_language - self.content_disposition = content_disposition - - -class FilterBlobItem(msrest.serialization.Model): +class FilterBlobItem(_serialization.Model): """Blob info from a Filter Blobs API call. 
- All required parameters must be populated in order to send to Azure. - - :param name: Required. - :type name: str - :param container_name: Required. - :type container_name: str - :param tags: A set of tags. Blob tags. - :type tags: ~azure.storage.blob.models.BlobTags + All required parameters must be populated in order to send to server. + + :ivar name: Required. + :vartype name: str + :ivar container_name: Required. + :vartype container_name: str + :ivar tags: Blob tags. + :vartype tags: ~azure.storage.blob.models.BlobTags + :ivar version_id: + :vartype version_id: str + :ivar is_current_version: + :vartype is_current_version: bool """ _validation = { - 'name': {'required': True}, - 'container_name': {'required': True}, + "name": {"required": True}, + "container_name": {"required": True}, } _attribute_map = { - 'name': {'key': 'Name', 'type': 'str'}, - 'container_name': {'key': 'ContainerName', 'type': 'str'}, - 'tags': {'key': 'Tags', 'type': 'BlobTags'}, - } - _xml_map = { - 'name': 'Blob' + "name": {"key": "Name", "type": "str"}, + "container_name": {"key": "ContainerName", "type": "str"}, + "tags": {"key": "Tags", "type": "BlobTags"}, + "version_id": {"key": "VersionId", "type": "str"}, + "is_current_version": {"key": "IsCurrentVersion", "type": "bool"}, } + _xml_map = {"name": "Blob"} def __init__( self, *, name: str, container_name: str, - tags: Optional["BlobTags"] = None, - **kwargs - ): - super(FilterBlobItem, self).__init__(**kwargs) + tags: Optional["_models.BlobTags"] = None, + version_id: Optional[str] = None, + is_current_version: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword name: Required. + :paramtype name: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword tags: Blob tags. + :paramtype tags: ~azure.storage.blob.models.BlobTags + :keyword version_id: + :paramtype version_id: str + :keyword is_current_version: + :paramtype is_current_version: bool + """ + super().__init__(**kwargs) self.name = name self.container_name = container_name self.tags = tags + self.version_id = version_id + self.is_current_version = is_current_version -class FilterBlobSegment(msrest.serialization.Model): +class FilterBlobSegment(_serialization.Model): """The result of a Filter Blobs API call. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param service_endpoint: Required. - :type service_endpoint: str - :param where: Required. - :type where: str - :param blobs: Required. - :type blobs: list[~azure.storage.blob.models.FilterBlobItem] - :param next_marker: - :type next_marker: str + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar where: Required. + :vartype where: str + :ivar blobs: Required. 
+ :vartype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'where': {'required': True}, - 'blobs': {'required': True}, + "service_endpoint": {"required": True}, + "where": {"required": True}, + "blobs": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'where': {'key': 'Where', 'type': 'str'}, - 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'wrapped': True, 'itemsName': 'Blob'}}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "where": {"key": "Where", "type": "str"}, + "blobs": { + "key": "Blobs", + "type": "[FilterBlobItem]", + "xml": {"name": "Blobs", "wrapped": True, "itemsName": "Blob"}, + }, + "next_marker": {"key": "NextMarker", "type": "str"}, } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, where: str, - blobs: List["FilterBlobItem"], + blobs: List["_models.FilterBlobItem"], next_marker: Optional[str] = None, - **kwargs - ): - super(FilterBlobSegment, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword where: Required. + :paramtype where: str + :keyword blobs: Required. + :paramtype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.where = where self.blobs = blobs self.next_marker = next_marker -class GeoReplication(msrest.serialization.Model): +class GeoReplication(_serialization.Model): """Geo-Replication information for the Secondary Storage Service. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param status: Required. The status of the secondary location. Possible values include: "live", - "bootstrap", "unavailable". - :type status: str or ~azure.storage.blob.models.GeoReplicationStatusType - :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes - preceding this value are guaranteed to be available for read operations at the secondary. - Primary writes after this point in time may or may not be available for reads. - :type last_sync_time: ~datetime.datetime + :ivar status: The status of the secondary location. Required. Known values are: "live", + "bootstrap", and "unavailable". + :vartype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :ivar last_sync_time: A GMT date/time value, to the second. All primary writes preceding this + value are guaranteed to be available for read operations at the secondary. Primary writes after + this point in time may or may not be available for reads. Required. 
+ :vartype last_sync_time: ~datetime.datetime """ _validation = { - 'status': {'required': True}, - 'last_sync_time': {'required': True}, + "status": {"required": True}, + "last_sync_time": {"required": True}, } _attribute_map = { - 'status': {'key': 'Status', 'type': 'str'}, - 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'}, + "status": {"key": "Status", "type": "str"}, + "last_sync_time": {"key": "LastSyncTime", "type": "rfc-1123"}, } def __init__( self, *, - status: Union[str, "GeoReplicationStatusType"], + status: Union[str, "_models.GeoReplicationStatusType"], last_sync_time: datetime.datetime, - **kwargs - ): - super(GeoReplication, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword status: The status of the secondary location. Required. Known values are: "live", + "bootstrap", and "unavailable". + :paramtype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :keyword last_sync_time: A GMT date/time value, to the second. All primary writes preceding + this value are guaranteed to be available for read operations at the secondary. Primary writes + after this point in time may or may not be available for reads. Required. + :paramtype last_sync_time: ~datetime.datetime + """ + super().__init__(**kwargs) self.status = status self.last_sync_time = last_sync_time -class JsonTextConfiguration(msrest.serialization.Model): +class JsonTextConfiguration(_serialization.Model): """json text configuration. - All required parameters must be populated in order to send to Azure. - - :param record_separator: Required. record separator. - :type record_separator: str + :ivar record_separator: The string used to separate records. + :vartype record_separator: str """ - _validation = { - 'record_separator': {'required': True}, - } - _attribute_map = { - 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, - } - _xml_map = { - 'name': 'JsonTextConfiguration' + "record_separator": {"key": "RecordSeparator", "type": "str", "xml": {"name": "RecordSeparator"}}, } + _xml_map = {"name": "JsonTextConfiguration"} - def __init__( - self, - *, - record_separator: str, - **kwargs - ): - super(JsonTextConfiguration, self).__init__(**kwargs) + def __init__(self, *, record_separator: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword record_separator: The string used to separate records. + :paramtype record_separator: str + """ + super().__init__(**kwargs) self.record_separator = record_separator -class KeyInfo(msrest.serialization.Model): +class KeyInfo(_serialization.Model): """Key information. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param start: Required. The date-time the key is active in ISO 8601 UTC time. - :type start: str - :param expiry: Required. The date-time the key expires in ISO 8601 UTC time. - :type expiry: str + :ivar start: The date-time the key is active in ISO 8601 UTC time. Required. + :vartype start: str + :ivar expiry: The date-time the key expires in ISO 8601 UTC time. Required. 
+ :vartype expiry: str """ _validation = { - 'start': {'required': True}, - 'expiry': {'required': True}, + "start": {"required": True}, + "expiry": {"required": True}, } _attribute_map = { - 'start': {'key': 'Start', 'type': 'str'}, - 'expiry': {'key': 'Expiry', 'type': 'str'}, - } - - def __init__( - self, - *, - start: str, - expiry: str, - **kwargs - ): - super(KeyInfo, self).__init__(**kwargs) + "start": {"key": "Start", "type": "str"}, + "expiry": {"key": "Expiry", "type": "str"}, + } + + def __init__(self, *, start: str, expiry: str, **kwargs: Any) -> None: + """ + :keyword start: The date-time the key is active in ISO 8601 UTC time. Required. + :paramtype start: str + :keyword expiry: The date-time the key expires in ISO 8601 UTC time. Required. + :paramtype expiry: str + """ + super().__init__(**kwargs) self.start = start self.expiry = expiry -class LeaseAccessConditions(msrest.serialization.Model): +class LeaseAccessConditions(_serialization.Model): """Parameter group. - :param lease_id: If specified, the operation only succeeds if the resource's lease is active - and matches this ID. - :type lease_id: str + :ivar lease_id: If specified, the operation only succeeds if the resource's lease is active and + matches this ID. + :vartype lease_id: str """ _attribute_map = { - 'lease_id': {'key': 'leaseId', 'type': 'str'}, + "lease_id": {"key": "leaseId", "type": "str"}, } - def __init__( - self, - *, - lease_id: Optional[str] = None, - **kwargs - ): - super(LeaseAccessConditions, self).__init__(**kwargs) + def __init__(self, *, lease_id: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. + :paramtype lease_id: str + """ + super().__init__(**kwargs) self.lease_id = lease_id -class ListBlobsFlatSegmentResponse(msrest.serialization.Model): +class ListBlobsFlatSegmentResponse(_serialization.Model): """An enumeration of blobs. - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param container_name: Required. - :type container_name: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param segment: Required. - :type segment: ~azure.storage.blob.models.BlobFlatListSegment - :param next_marker: - :type next_marker: str + All required parameters must be populated in order to send to server. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar segment: Required. 
+ :vartype segment: ~azure.storage.blob.models.BlobFlatListSegment + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'container_name': {'required': True}, - 'segment': {'required': True}, + "service_endpoint": {"required": True}, + "container_name": {"required": True}, + "segment": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment'}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "container_name": {"key": "ContainerName", "type": "str", "xml": {"attr": True}}, + "prefix": {"key": "Prefix", "type": "str"}, + "marker": {"key": "Marker", "type": "str"}, + "max_results": {"key": "MaxResults", "type": "int"}, + "segment": {"key": "Segment", "type": "BlobFlatListSegment"}, + "next_marker": {"key": "NextMarker", "type": "str"}, } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, container_name: str, - segment: "BlobFlatListSegment", + segment: "_models.BlobFlatListSegment", prefix: Optional[str] = None, marker: Optional[str] = None, max_results: Optional[int] = None, next_marker: Optional[str] = None, - **kwargs - ): - super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword segment: Required. + :paramtype segment: ~azure.storage.blob.models.BlobFlatListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.container_name = container_name self.prefix = prefix @@ -1499,63 +1774,79 @@ def __init__( self.next_marker = next_marker -class ListBlobsHierarchySegmentResponse(msrest.serialization.Model): +class ListBlobsHierarchySegmentResponse(_serialization.Model): """An enumeration of blobs. - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param container_name: Required. - :type container_name: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param delimiter: - :type delimiter: str - :param segment: Required. - :type segment: ~azure.storage.blob.models.BlobHierarchyListSegment - :param next_marker: - :type next_marker: str + All required parameters must be populated in order to send to server. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar delimiter: + :vartype delimiter: str + :ivar segment: Required. 
+ :vartype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'container_name': {'required': True}, - 'segment': {'required': True}, + "service_endpoint": {"required": True}, + "container_name": {"required": True}, + "segment": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'delimiter': {'key': 'Delimiter', 'type': 'str'}, - 'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment'}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "container_name": {"key": "ContainerName", "type": "str", "xml": {"attr": True}}, + "prefix": {"key": "Prefix", "type": "str"}, + "marker": {"key": "Marker", "type": "str"}, + "max_results": {"key": "MaxResults", "type": "int"}, + "delimiter": {"key": "Delimiter", "type": "str"}, + "segment": {"key": "Segment", "type": "BlobHierarchyListSegment"}, + "next_marker": {"key": "NextMarker", "type": "str"}, } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, container_name: str, - segment: "BlobHierarchyListSegment", + segment: "_models.BlobHierarchyListSegment", prefix: Optional[str] = None, marker: Optional[str] = None, max_results: Optional[int] = None, delimiter: Optional[str] = None, next_marker: Optional[str] = None, - **kwargs - ): - super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword delimiter: + :paramtype delimiter: str + :keyword segment: Required. + :paramtype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.container_name = container_name self.prefix = prefix @@ -1566,54 +1857,70 @@ def __init__( self.next_marker = next_marker -class ListContainersSegmentResponse(msrest.serialization.Model): +class ListContainersSegmentResponse(_serialization.Model): """An enumeration of containers. - All required parameters must be populated in order to send to Azure. - - :param service_endpoint: Required. - :type service_endpoint: str - :param prefix: - :type prefix: str - :param marker: - :type marker: str - :param max_results: - :type max_results: int - :param container_items: Required. - :type container_items: list[~azure.storage.blob.models.ContainerItem] - :param next_marker: - :type next_marker: str + All required parameters must be populated in order to send to server. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar container_items: Required. 
+ :vartype container_items: list[~azure.storage.blob.models.ContainerItem] + :ivar next_marker: + :vartype next_marker: str """ _validation = { - 'service_endpoint': {'required': True}, - 'container_items': {'required': True}, + "service_endpoint": {"required": True}, + "container_items": {"required": True}, } _attribute_map = { - 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, - 'prefix': {'key': 'Prefix', 'type': 'str'}, - 'marker': {'key': 'Marker', 'type': 'str'}, - 'max_results': {'key': 'MaxResults', 'type': 'int'}, - 'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'wrapped': True, 'itemsName': 'Container'}}, - 'next_marker': {'key': 'NextMarker', 'type': 'str'}, - } - _xml_map = { - 'name': 'EnumerationResults' - } + "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "prefix": {"key": "Prefix", "type": "str"}, + "marker": {"key": "Marker", "type": "str"}, + "max_results": {"key": "MaxResults", "type": "int"}, + "container_items": { + "key": "ContainerItems", + "type": "[ContainerItem]", + "xml": {"name": "Containers", "wrapped": True, "itemsName": "Container"}, + }, + "next_marker": {"key": "NextMarker", "type": "str"}, + } + _xml_map = {"name": "EnumerationResults"} def __init__( self, *, service_endpoint: str, - container_items: List["ContainerItem"], + container_items: List["_models.ContainerItem"], prefix: Optional[str] = None, marker: Optional[str] = None, max_results: Optional[int] = None, next_marker: Optional[str] = None, - **kwargs - ): - super(ListContainersSegmentResponse, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword container_items: Required. + :paramtype container_items: list[~azure.storage.blob.models.ContainerItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.service_endpoint = service_endpoint self.prefix = prefix self.marker = marker @@ -1622,38 +1929,38 @@ def __init__( self.next_marker = next_marker -class Logging(msrest.serialization.Model): +class Logging(_serialization.Model): """Azure Analytics Logging settings. - All required parameters must be populated in order to send to Azure. - - :param version: Required. The version of Storage Analytics to configure. - :type version: str - :param delete: Required. Indicates whether all delete requests should be logged. - :type delete: bool - :param read: Required. Indicates whether all read requests should be logged. - :type read: bool - :param write: Required. Indicates whether all write requests should be logged. - :type write: bool - :param retention_policy: Required. the retention policy which determines how long the - associated data should persist. - :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + All required parameters must be populated in order to send to server. + + :ivar version: The version of Storage Analytics to configure. Required. + :vartype version: str + :ivar delete: Indicates whether all delete requests should be logged. Required. + :vartype delete: bool + :ivar read: Indicates whether all read requests should be logged. Required. + :vartype read: bool + :ivar write: Indicates whether all write requests should be logged. Required. 
+ :vartype write: bool + :ivar retention_policy: the retention policy which determines how long the associated data + should persist. Required. + :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy """ _validation = { - 'version': {'required': True}, - 'delete': {'required': True}, - 'read': {'required': True}, - 'write': {'required': True}, - 'retention_policy': {'required': True}, + "version": {"required": True}, + "delete": {"required": True}, + "read": {"required": True}, + "write": {"required": True}, + "retention_policy": {"required": True}, } _attribute_map = { - 'version': {'key': 'Version', 'type': 'str'}, - 'delete': {'key': 'Delete', 'type': 'bool'}, - 'read': {'key': 'Read', 'type': 'bool'}, - 'write': {'key': 'Write', 'type': 'bool'}, - 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + "version": {"key": "Version", "type": "str"}, + "delete": {"key": "Delete", "type": "bool"}, + "read": {"key": "Read", "type": "bool"}, + "write": {"key": "Write", "type": "bool"}, + "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, } def __init__( @@ -1663,10 +1970,23 @@ def __init__( delete: bool, read: bool, write: bool, - retention_policy: "RetentionPolicy", - **kwargs - ): - super(Logging, self).__init__(**kwargs) + retention_policy: "_models.RetentionPolicy", + **kwargs: Any + ) -> None: + """ + :keyword version: The version of Storage Analytics to configure. Required. + :paramtype version: str + :keyword delete: Indicates whether all delete requests should be logged. Required. + :paramtype delete: bool + :keyword read: Indicates whether all read requests should be logged. Required. + :paramtype read: bool + :keyword write: Indicates whether all write requests should be logged. Required. + :paramtype write: bool + :keyword retention_policy: the retention policy which determines how long the associated data + should persist. Required. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super().__init__(**kwargs) self.version = version self.delete = delete self.read = read @@ -1674,32 +1994,32 @@ def __init__( self.retention_policy = retention_policy -class Metrics(msrest.serialization.Model): +class Metrics(_serialization.Model): """a summary of request statistics grouped by API in hour or minute aggregates for blobs. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param version: The version of Storage Analytics to configure. - :type version: str - :param enabled: Required. Indicates whether metrics are enabled for the Blob service. - :type enabled: bool - :param include_apis: Indicates whether metrics should generate summary statistics for called - API operations. - :type include_apis: bool - :param retention_policy: the retention policy which determines how long the associated data + :ivar version: The version of Storage Analytics to configure. + :vartype version: str + :ivar enabled: Indicates whether metrics are enabled for the Blob service. Required. + :vartype enabled: bool + :ivar include_apis: Indicates whether metrics should generate summary statistics for called API + operations. + :vartype include_apis: bool + :ivar retention_policy: the retention policy which determines how long the associated data should persist. 
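# Illustrative usage sketch of the analytics-settings models above, assuming
# the vendored package is importable at the path this patch creates. The
# `_attribute_map` tables drive the msrest-style serialization: each entry maps
# a Python attribute to its XML element name and wire type, and `_validation`
# marks the required fields. Values here are illustrative:
#
#     from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated import (
#         models as _models,
#     )
#
#     retention = _models.RetentionPolicy(enabled=True, days=7)
#     logging_settings = _models.Logging(
#         version="1.0", delete=True, read=True, write=True,
#         retention_policy=retention,
#     )
#     # serialize() emits the wire shape keyed by the `_attribute_map` names,
#     # roughly: {'Version': '1.0', 'Delete': True, ..., 'RetentionPolicy': {'Enabled': True, 'Days': 7}}
#     print(logging_settings.serialize())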
- :type retention_policy: ~azure.storage.blob.models.RetentionPolicy + :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy """ _validation = { - 'enabled': {'required': True}, + "enabled": {"required": True}, } _attribute_map = { - 'version': {'key': 'Version', 'type': 'str'}, - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'}, - 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + "version": {"key": "Version", "type": "str"}, + "enabled": {"key": "Enabled", "type": "bool"}, + "include_apis": {"key": "IncludeAPIs", "type": "bool"}, + "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, } def __init__( @@ -1708,40 +2028,52 @@ def __init__( enabled: bool, version: Optional[str] = None, include_apis: Optional[bool] = None, - retention_policy: Optional["RetentionPolicy"] = None, - **kwargs - ): - super(Metrics, self).__init__(**kwargs) + retention_policy: Optional["_models.RetentionPolicy"] = None, + **kwargs: Any + ) -> None: + """ + :keyword version: The version of Storage Analytics to configure. + :paramtype version: str + :keyword enabled: Indicates whether metrics are enabled for the Blob service. Required. + :paramtype enabled: bool + :keyword include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :paramtype include_apis: bool + :keyword retention_policy: the retention policy which determines how long the associated data + should persist. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super().__init__(**kwargs) self.version = version self.enabled = enabled self.include_apis = include_apis self.retention_policy = retention_policy -class ModifiedAccessConditions(msrest.serialization.Model): +class ModifiedAccessConditions(_serialization.Model): """Parameter group. - :param if_modified_since: Specify this header value to operate only on a blob if it has been + :ivar if_modified_since: Specify this header value to operate only on a blob if it has been modified since the specified date/time. - :type if_modified_since: ~datetime.datetime - :param if_unmodified_since: Specify this header value to operate only on a blob if it has not + :vartype if_modified_since: ~datetime.datetime + :ivar if_unmodified_since: Specify this header value to operate only on a blob if it has not been modified since the specified date/time. - :type if_unmodified_since: ~datetime.datetime - :param if_match: Specify an ETag value to operate only on blobs with a matching value. - :type if_match: str - :param if_none_match: Specify an ETag value to operate only on blobs without a matching value. - :type if_none_match: str - :param if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a - matching value. - :type if_tags: str + :vartype if_unmodified_since: ~datetime.datetime + :ivar if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype if_match: str + :ivar if_none_match: Specify an ETag value to operate only on blobs without a matching value. + :vartype if_none_match: str + :ivar if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a matching + value. 
+ :vartype if_tags: str """ _attribute_map = { - 'if_modified_since': {'key': 'ifModifiedSince', 'type': 'rfc-1123'}, - 'if_unmodified_since': {'key': 'ifUnmodifiedSince', 'type': 'rfc-1123'}, - 'if_match': {'key': 'ifMatch', 'type': 'str'}, - 'if_none_match': {'key': 'ifNoneMatch', 'type': 'str'}, - 'if_tags': {'key': 'ifTags', 'type': 'str'}, + "if_modified_since": {"key": "ifModifiedSince", "type": "rfc-1123"}, + "if_unmodified_since": {"key": "ifUnmodifiedSince", "type": "rfc-1123"}, + "if_match": {"key": "ifMatch", "type": "str"}, + "if_none_match": {"key": "ifNoneMatch", "type": "str"}, + "if_tags": {"key": "ifTags", "type": "str"}, } def __init__( @@ -1752,9 +2084,25 @@ def __init__( if_match: Optional[str] = None, if_none_match: Optional[str] = None, if_tags: Optional[str] = None, - **kwargs - ): - super(ModifiedAccessConditions, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype if_match: str + :keyword if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :paramtype if_none_match: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :paramtype if_tags: str + """ + super().__init__(**kwargs) self.if_modified_since = if_modified_since self.if_unmodified_since = if_unmodified_since self.if_match = if_match @@ -1762,136 +2110,175 @@ def __init__( self.if_tags = if_tags -class PageList(msrest.serialization.Model): +class PageList(_serialization.Model): """the list of pages. 
- :param page_range: - :type page_range: list[~azure.storage.blob.models.PageRange] - :param clear_range: - :type clear_range: list[~azure.storage.blob.models.ClearRange] + :ivar page_range: + :vartype page_range: list[~azure.storage.blob.models.PageRange] + :ivar clear_range: + :vartype clear_range: list[~azure.storage.blob.models.ClearRange] + :ivar next_marker: + :vartype next_marker: str """ _attribute_map = { - 'page_range': {'key': 'PageRange', 'type': '[PageRange]'}, - 'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]'}, + "page_range": {"key": "PageRange", "type": "[PageRange]", "xml": {"itemsName": "PageRange"}}, + "clear_range": {"key": "ClearRange", "type": "[ClearRange]", "xml": {"itemsName": "ClearRange"}}, + "next_marker": {"key": "NextMarker", "type": "str"}, } def __init__( self, *, - page_range: Optional[List["PageRange"]] = None, - clear_range: Optional[List["ClearRange"]] = None, - **kwargs - ): - super(PageList, self).__init__(**kwargs) + page_range: Optional[List["_models.PageRange"]] = None, + clear_range: Optional[List["_models.ClearRange"]] = None, + next_marker: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword page_range: + :paramtype page_range: list[~azure.storage.blob.models.PageRange] + :keyword clear_range: + :paramtype clear_range: list[~azure.storage.blob.models.ClearRange] + :keyword next_marker: + :paramtype next_marker: str + """ + super().__init__(**kwargs) self.page_range = page_range self.clear_range = clear_range + self.next_marker = next_marker -class PageRange(msrest.serialization.Model): +class PageRange(_serialization.Model): """PageRange. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param start: Required. - :type start: long - :param end: Required. - :type end: long + :ivar start: Required. + :vartype start: int + :ivar end: Required. + :vartype end: int """ _validation = { - 'start': {'required': True}, - 'end': {'required': True}, + "start": {"required": True}, + "end": {"required": True}, } _attribute_map = { - 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, - 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, - } - _xml_map = { - 'name': 'PageRange' - } - - def __init__( - self, - *, - start: int, - end: int, - **kwargs - ): - super(PageRange, self).__init__(**kwargs) + "start": {"key": "Start", "type": "int", "xml": {"name": "Start"}}, + "end": {"key": "End", "type": "int", "xml": {"name": "End"}}, + } + _xml_map = {"name": "PageRange"} + + def __init__(self, *, start: int, end: int, **kwargs: Any) -> None: + """ + :keyword start: Required. + :paramtype start: int + :keyword end: Required. + :paramtype end: int + """ + super().__init__(**kwargs) self.start = start self.end = end -class QueryFormat(msrest.serialization.Model): +class QueryFormat(_serialization.Model): """QueryFormat. - :param type: The quick query format type. Possible values include: "delimited", "json", - "arrow". - :type type: str or ~azure.storage.blob.models.QueryFormatType - :param delimited_text_configuration: delimited text configuration. - :type delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration - :param json_text_configuration: json text configuration. - :type json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration - :param arrow_configuration: arrow configuration. 
- :type arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + All required parameters must be populated in order to send to server. + + :ivar type: The quick query format type. Required. Known values are: "delimited", "json", + "arrow", and "parquet". + :vartype type: str or ~azure.storage.blob.models.QueryFormatType + :ivar delimited_text_configuration: Groups the settings used for interpreting the blob data if + the blob is delimited text formatted. + :vartype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :ivar json_text_configuration: json text configuration. + :vartype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :ivar arrow_configuration: Groups the settings used for formatting the response if the response + should be Arrow formatted. + :vartype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :ivar parquet_text_configuration: parquet configuration. + :vartype parquet_text_configuration: JSON """ + _validation = { + "type": {"required": True}, + } + _attribute_map = { - 'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}}, - 'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration'}, - 'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration'}, - 'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration'}, + "type": {"key": "Type", "type": "str", "xml": {"name": "Type"}}, + "delimited_text_configuration": {"key": "DelimitedTextConfiguration", "type": "DelimitedTextConfiguration"}, + "json_text_configuration": {"key": "JsonTextConfiguration", "type": "JsonTextConfiguration"}, + "arrow_configuration": {"key": "ArrowConfiguration", "type": "ArrowConfiguration"}, + "parquet_text_configuration": {"key": "ParquetTextConfiguration", "type": "object"}, } def __init__( self, *, - type: Optional[Union[str, "QueryFormatType"]] = None, - delimited_text_configuration: Optional["DelimitedTextConfiguration"] = None, - json_text_configuration: Optional["JsonTextConfiguration"] = None, - arrow_configuration: Optional["ArrowConfiguration"] = None, - **kwargs - ): - super(QueryFormat, self).__init__(**kwargs) + type: Union[str, "_models.QueryFormatType"], + delimited_text_configuration: Optional["_models.DelimitedTextConfiguration"] = None, + json_text_configuration: Optional["_models.JsonTextConfiguration"] = None, + arrow_configuration: Optional["_models.ArrowConfiguration"] = None, + parquet_text_configuration: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword type: The quick query format type. Required. Known values are: "delimited", "json", + "arrow", and "parquet". + :paramtype type: str or ~azure.storage.blob.models.QueryFormatType + :keyword delimited_text_configuration: Groups the settings used for interpreting the blob data + if the blob is delimited text formatted. + :paramtype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :keyword json_text_configuration: json text configuration. + :paramtype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :keyword arrow_configuration: Groups the settings used for formatting the response if the + response should be Arrow formatted. + :paramtype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :keyword parquet_text_configuration: parquet configuration. 
+ :paramtype parquet_text_configuration: JSON + """ + super().__init__(**kwargs) self.type = type self.delimited_text_configuration = delimited_text_configuration self.json_text_configuration = json_text_configuration self.arrow_configuration = arrow_configuration + self.parquet_text_configuration = parquet_text_configuration -class QueryRequest(msrest.serialization.Model): - """the quick query body. +class QueryRequest(_serialization.Model): + """Groups the set of query request settings. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :ivar query_type: Required. the query type. Default value: "SQL". + :ivar query_type: Required. The type of the provided query expression. Required. Default value + is "SQL". :vartype query_type: str - :param expression: Required. a query statement. - :type expression: str - :param input_serialization: - :type input_serialization: ~azure.storage.blob.models.QuerySerialization - :param output_serialization: - :type output_serialization: ~azure.storage.blob.models.QuerySerialization + :ivar expression: The query expression in SQL. The maximum size of the query expression is + 256KiB. Required. + :vartype expression: str + :ivar input_serialization: + :vartype input_serialization: ~azure.storage.blob.models.QuerySerialization + :ivar output_serialization: + :vartype output_serialization: ~azure.storage.blob.models.QuerySerialization """ _validation = { - 'query_type': {'required': True, 'constant': True}, - 'expression': {'required': True}, + "query_type": {"required": True, "constant": True}, + "expression": {"required": True}, } _attribute_map = { - 'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}}, - 'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}}, - 'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization'}, - 'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization'}, - } - _xml_map = { - 'name': 'QueryRequest' + "query_type": {"key": "QueryType", "type": "str", "xml": {"name": "QueryType"}}, + "expression": {"key": "Expression", "type": "str", "xml": {"name": "Expression"}}, + "input_serialization": {"key": "InputSerialization", "type": "QuerySerialization"}, + "output_serialization": {"key": "OutputSerialization", "type": "QuerySerialization"}, } + _xml_map = {"name": "QueryRequest"} query_type = "SQL" @@ -1899,102 +2286,116 @@ def __init__( self, *, expression: str, - input_serialization: Optional["QuerySerialization"] = None, - output_serialization: Optional["QuerySerialization"] = None, - **kwargs - ): - super(QueryRequest, self).__init__(**kwargs) + input_serialization: Optional["_models.QuerySerialization"] = None, + output_serialization: Optional["_models.QuerySerialization"] = None, + **kwargs: Any + ) -> None: + """ + :keyword expression: The query expression in SQL. The maximum size of the query expression is + 256KiB. Required. 
+ :paramtype expression: str + :keyword input_serialization: + :paramtype input_serialization: ~azure.storage.blob.models.QuerySerialization + :keyword output_serialization: + :paramtype output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + super().__init__(**kwargs) self.expression = expression self.input_serialization = input_serialization self.output_serialization = output_serialization -class QuerySerialization(msrest.serialization.Model): +class QuerySerialization(_serialization.Model): """QuerySerialization. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param format: Required. - :type format: ~azure.storage.blob.models.QueryFormat + :ivar format: Required. + :vartype format: ~azure.storage.blob.models.QueryFormat """ _validation = { - 'format': {'required': True}, + "format": {"required": True}, } _attribute_map = { - 'format': {'key': 'Format', 'type': 'QueryFormat'}, + "format": {"key": "Format", "type": "QueryFormat"}, } - def __init__( - self, - *, - format: "QueryFormat", - **kwargs - ): - super(QuerySerialization, self).__init__(**kwargs) + def __init__(self, *, format: "_models.QueryFormat", **kwargs: Any) -> None: + """ + :keyword format: Required. + :paramtype format: ~azure.storage.blob.models.QueryFormat + """ + super().__init__(**kwargs) self.format = format -class RetentionPolicy(msrest.serialization.Model): +class RetentionPolicy(_serialization.Model): """the retention policy which determines how long the associated data should persist. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param enabled: Required. Indicates whether a retention policy is enabled for the storage - service. - :type enabled: bool - :param days: Indicates the number of days that metrics or logging or soft-deleted data should - be retained. All data older than this value will be deleted. - :type days: int - :param allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + :ivar enabled: Indicates whether a retention policy is enabled for the storage service. + Required. + :vartype enabled: bool + :ivar days: Indicates the number of days that metrics or logging or soft-deleted data should be + retained. All data older than this value will be deleted. + :vartype days: int + :ivar allow_permanent_delete: Indicates whether permanent delete is allowed on this storage account. - :type allow_permanent_delete: bool + :vartype allow_permanent_delete: bool """ _validation = { - 'enabled': {'required': True}, - 'days': {'minimum': 1}, + "enabled": {"required": True}, + "days": {"minimum": 1}, } _attribute_map = { - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'days': {'key': 'Days', 'type': 'int'}, - 'allow_permanent_delete': {'key': 'AllowPermanentDelete', 'type': 'bool'}, + "enabled": {"key": "Enabled", "type": "bool"}, + "days": {"key": "Days", "type": "int"}, + "allow_permanent_delete": {"key": "AllowPermanentDelete", "type": "bool"}, } def __init__( - self, - *, - enabled: bool, - days: Optional[int] = None, - allow_permanent_delete: Optional[bool] = None, - **kwargs - ): - super(RetentionPolicy, self).__init__(**kwargs) + self, *, enabled: bool, days: Optional[int] = None, allow_permanent_delete: Optional[bool] = None, **kwargs: Any + ) -> None: + """ + :keyword enabled: Indicates whether a retention policy is enabled for the storage service. + Required. 
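# Illustrative sketch of assembling the quick-query models above, assuming the
# vendored package is importable; this shows model composition only, not a
# service call. `QueryRequest.query_type` is pinned to "SQL" by its
# `{'required': True, 'constant': True}` validation entry, so it is a
# class-level constant rather than an `__init__` parameter:
#
#     from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated import (
#         models as _models,
#     )
#
#     fmt = _models.QueryFormat(type="json")  # `type` is now required
#     ser = _models.QuerySerialization(format=fmt)
#     request = _models.QueryRequest(
#         expression="SELECT * FROM BlobStorage",  # hypothetical query text
#         input_serialization=ser,
#         output_serialization=ser,
#     )
#     assert request.query_type == "SQL"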
+ :paramtype enabled: bool + :keyword days: Indicates the number of days that metrics or logging or soft-deleted data should + be retained. All data older than this value will be deleted. + :paramtype days: int + :keyword allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + account. + :paramtype allow_permanent_delete: bool + """ + super().__init__(**kwargs) self.enabled = enabled self.days = days self.allow_permanent_delete = allow_permanent_delete -class SequenceNumberAccessConditions(msrest.serialization.Model): +class SequenceNumberAccessConditions(_serialization.Model): """Parameter group. - :param if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a + :ivar if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. - :type if_sequence_number_less_than_or_equal_to: long - :param if_sequence_number_less_than: Specify this header value to operate only on a blob if it + :vartype if_sequence_number_less_than_or_equal_to: int + :ivar if_sequence_number_less_than: Specify this header value to operate only on a blob if it has a sequence number less than the specified. - :type if_sequence_number_less_than: long - :param if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + :vartype if_sequence_number_less_than: int + :ivar if_sequence_number_equal_to: Specify this header value to operate only on a blob if it has the specified sequence number. - :type if_sequence_number_equal_to: long + :vartype if_sequence_number_equal_to: int """ _attribute_map = { - 'if_sequence_number_less_than_or_equal_to': {'key': 'ifSequenceNumberLessThanOrEqualTo', 'type': 'long'}, - 'if_sequence_number_less_than': {'key': 'ifSequenceNumberLessThan', 'type': 'long'}, - 'if_sequence_number_equal_to': {'key': 'ifSequenceNumberEqualTo', 'type': 'long'}, + "if_sequence_number_less_than_or_equal_to": {"key": "ifSequenceNumberLessThanOrEqualTo", "type": "int"}, + "if_sequence_number_less_than": {"key": "ifSequenceNumberLessThan", "type": "int"}, + "if_sequence_number_equal_to": {"key": "ifSequenceNumberEqualTo", "type": "int"}, } def __init__( @@ -2003,74 +2404,89 @@ def __init__( if_sequence_number_less_than_or_equal_to: Optional[int] = None, if_sequence_number_less_than: Optional[int] = None, if_sequence_number_equal_to: Optional[int] = None, - **kwargs - ): - super(SequenceNumberAccessConditions, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. + :paramtype if_sequence_number_less_than_or_equal_to: int + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. + :paramtype if_sequence_number_less_than: int + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. + :paramtype if_sequence_number_equal_to: int + """ + super().__init__(**kwargs) self.if_sequence_number_less_than_or_equal_to = if_sequence_number_less_than_or_equal_to self.if_sequence_number_less_than = if_sequence_number_less_than self.if_sequence_number_equal_to = if_sequence_number_equal_to -class SignedIdentifier(msrest.serialization.Model): +class SignedIdentifier(_serialization.Model): """signed identifier. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param id: Required. a unique id. - :type id: str - :param access_policy: An Access policy. - :type access_policy: ~azure.storage.blob.models.AccessPolicy + :ivar id: a unique id. Required. + :vartype id: str + :ivar access_policy: An Access policy. + :vartype access_policy: ~azure.storage.blob.models.AccessPolicy """ _validation = { - 'id': {'required': True}, + "id": {"required": True}, } _attribute_map = { - 'id': {'key': 'Id', 'type': 'str'}, - 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, - } - _xml_map = { - 'name': 'SignedIdentifier' + "id": {"key": "Id", "type": "str"}, + "access_policy": {"key": "AccessPolicy", "type": "AccessPolicy"}, } + _xml_map = {"name": "SignedIdentifier"} def __init__( self, *, - id: str, - access_policy: Optional["AccessPolicy"] = None, - **kwargs - ): - super(SignedIdentifier, self).__init__(**kwargs) + id: str, # pylint: disable=redefined-builtin + access_policy: Optional["_models.AccessPolicy"] = None, + **kwargs: Any + ) -> None: + """ + :keyword id: a unique id. Required. + :paramtype id: str + :keyword access_policy: An Access policy. + :paramtype access_policy: ~azure.storage.blob.models.AccessPolicy + """ + super().__init__(**kwargs) self.id = id self.access_policy = access_policy -class SourceModifiedAccessConditions(msrest.serialization.Model): +class SourceModifiedAccessConditions(_serialization.Model): """Parameter group. - :param source_if_modified_since: Specify this header value to operate only on a blob if it has + :ivar source_if_modified_since: Specify this header value to operate only on a blob if it has been modified since the specified date/time. - :type source_if_modified_since: ~datetime.datetime - :param source_if_unmodified_since: Specify this header value to operate only on a blob if it - has not been modified since the specified date/time. - :type source_if_unmodified_since: ~datetime.datetime - :param source_if_match: Specify an ETag value to operate only on blobs with a matching value. - :type source_if_match: str - :param source_if_none_match: Specify an ETag value to operate only on blobs without a matching + :vartype source_if_modified_since: ~datetime.datetime + :ivar source_if_unmodified_since: Specify this header value to operate only on a blob if it has + not been modified since the specified date/time. + :vartype source_if_unmodified_since: ~datetime.datetime + :ivar source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype source_if_match: str + :ivar source_if_none_match: Specify an ETag value to operate only on blobs without a matching value. - :type source_if_none_match: str - :param source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + :vartype source_if_none_match: str + :ivar source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a matching value. 
- :type source_if_tags: str + :vartype source_if_tags: str """ _attribute_map = { - 'source_if_modified_since': {'key': 'sourceIfModifiedSince', 'type': 'rfc-1123'}, - 'source_if_unmodified_since': {'key': 'sourceIfUnmodifiedSince', 'type': 'rfc-1123'}, - 'source_if_match': {'key': 'sourceIfMatch', 'type': 'str'}, - 'source_if_none_match': {'key': 'sourceIfNoneMatch', 'type': 'str'}, - 'source_if_tags': {'key': 'sourceIfTags', 'type': 'str'}, + "source_if_modified_since": {"key": "sourceIfModifiedSince", "type": "rfc-1123"}, + "source_if_unmodified_since": {"key": "sourceIfUnmodifiedSince", "type": "rfc-1123"}, + "source_if_match": {"key": "sourceIfMatch", "type": "str"}, + "source_if_none_match": {"key": "sourceIfNoneMatch", "type": "str"}, + "source_if_tags": {"key": "sourceIfTags", "type": "str"}, } def __init__( @@ -2081,9 +2497,25 @@ def __init__( source_if_match: Optional[str] = None, source_if_none_match: Optional[str] = None, source_if_tags: Optional[str] = None, - **kwargs - ): - super(SourceModifiedAccessConditions, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword source_if_modified_since: Specify this header value to operate only on a blob if it + has been modified since the specified date/time. + :paramtype source_if_modified_since: ~datetime.datetime + :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it + has not been modified since the specified date/time. + :paramtype source_if_unmodified_since: ~datetime.datetime + :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype source_if_match: str + :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a + matching value. + :paramtype source_if_none_match: str + :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with + a matching value. + :paramtype source_if_tags: str + """ + super().__init__(**kwargs) self.source_if_modified_since = source_if_modified_since self.source_if_unmodified_since = source_if_unmodified_since self.source_if_match = source_if_match @@ -2091,30 +2523,30 @@ def __init__( self.source_if_tags = source_if_tags -class StaticWebsite(msrest.serialization.Model): +class StaticWebsite(_serialization.Model): """The properties that enable an account to host a static website. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :param enabled: Required. Indicates whether this account is hosting a static website. - :type enabled: bool - :param index_document: The default name of the index page under each directory. - :type index_document: str - :param error_document404_path: The absolute path of the custom 404 page. - :type error_document404_path: str - :param default_index_document_path: Absolute path of the default index page. - :type default_index_document_path: str + :ivar enabled: Indicates whether this account is hosting a static website. Required. + :vartype enabled: bool + :ivar index_document: The default name of the index page under each directory. + :vartype index_document: str + :ivar error_document404_path: The absolute path of the custom 404 page. + :vartype error_document404_path: str + :ivar default_index_document_path: Absolute path of the default index page. 
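# Illustrative sketch of the precondition parameter groups above, assuming the
# vendored package is importable. They are plain keyword containers; the
# generated operation methods unpack them into conditional request headers
# (If-Match, x-ms-if-tags, and the x-ms-source-* variants used by the
# copy-from-URL operations). The ETag and tag values are hypothetical:
#
#     import datetime
#     from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated import (
#         models as _models,
#     )
#
#     conditions = _models.ModifiedAccessConditions(
#         if_match='"0x8D4BCC2E4835CD0"',   # succeed only if the blob still has this ETag
#         if_tags="\"status\" = 'ready'",   # SQL-style blob-tag filter
#     )
#     source_conditions = _models.SourceModifiedAccessConditions(
#         source_if_modified_since=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
#     )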
+ :vartype default_index_document_path: str """ _validation = { - 'enabled': {'required': True}, + "enabled": {"required": True}, } _attribute_map = { - 'enabled': {'key': 'Enabled', 'type': 'bool'}, - 'index_document': {'key': 'IndexDocument', 'type': 'str'}, - 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str'}, - 'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str'}, + "enabled": {"key": "Enabled", "type": "bool"}, + "index_document": {"key": "IndexDocument", "type": "str"}, + "error_document404_path": {"key": "ErrorDocument404Path", "type": "str"}, + "default_index_document_path": {"key": "DefaultIndexDocumentPath", "type": "str"}, } def __init__( @@ -2124,83 +2556,113 @@ def __init__( index_document: Optional[str] = None, error_document404_path: Optional[str] = None, default_index_document_path: Optional[str] = None, - **kwargs - ): - super(StaticWebsite, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword enabled: Indicates whether this account is hosting a static website. Required. + :paramtype enabled: bool + :keyword index_document: The default name of the index page under each directory. + :paramtype index_document: str + :keyword error_document404_path: The absolute path of the custom 404 page. + :paramtype error_document404_path: str + :keyword default_index_document_path: Absolute path of the default index page. + :paramtype default_index_document_path: str + """ + super().__init__(**kwargs) self.enabled = enabled self.index_document = index_document self.error_document404_path = error_document404_path self.default_index_document_path = default_index_document_path -class StorageError(msrest.serialization.Model): +class StorageError(_serialization.Model): """StorageError. - :param message: - :type message: str + :ivar message: + :vartype message: str """ _attribute_map = { - 'message': {'key': 'Message', 'type': 'str'}, + "message": {"key": "Message", "type": "str"}, } - def __init__( - self, - *, - message: Optional[str] = None, - **kwargs - ): - super(StorageError, self).__init__(**kwargs) + def __init__(self, *, message: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword message: + :paramtype message: str + """ + super().__init__(**kwargs) self.message = message -class StorageServiceProperties(msrest.serialization.Model): +class StorageServiceProperties(_serialization.Model): """Storage Service Properties. - :param logging: Azure Analytics Logging settings. - :type logging: ~azure.storage.blob.models.Logging - :param hour_metrics: a summary of request statistics grouped by API in hour or minute - aggregates for blobs. - :type hour_metrics: ~azure.storage.blob.models.Metrics - :param minute_metrics: a summary of request statistics grouped by API in hour or minute + :ivar logging: Azure Analytics Logging settings. + :vartype logging: ~azure.storage.blob.models.Logging + :ivar hour_metrics: a summary of request statistics grouped by API in hour or minute aggregates + for blobs. + :vartype hour_metrics: ~azure.storage.blob.models.Metrics + :ivar minute_metrics: a summary of request statistics grouped by API in hour or minute aggregates for blobs. - :type minute_metrics: ~azure.storage.blob.models.Metrics - :param cors: The set of CORS rules. - :type cors: list[~azure.storage.blob.models.CorsRule] - :param default_service_version: The default version to use for requests to the Blob service if + :vartype minute_metrics: ~azure.storage.blob.models.Metrics + :ivar cors: The set of CORS rules. 
+ :vartype cors: list[~azure.storage.blob.models.CorsRule] + :ivar default_service_version: The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions. - :type default_service_version: str - :param delete_retention_policy: the retention policy which determines how long the associated + :vartype default_service_version: str + :ivar delete_retention_policy: the retention policy which determines how long the associated data should persist. - :type delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy - :param static_website: The properties that enable an account to host a static website. - :type static_website: ~azure.storage.blob.models.StaticWebsite + :vartype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :ivar static_website: The properties that enable an account to host a static website. + :vartype static_website: ~azure.storage.blob.models.StaticWebsite """ _attribute_map = { - 'logging': {'key': 'Logging', 'type': 'Logging'}, - 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, - 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, - 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}}, - 'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str'}, - 'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy'}, - 'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite'}, + "logging": {"key": "Logging", "type": "Logging"}, + "hour_metrics": {"key": "HourMetrics", "type": "Metrics"}, + "minute_metrics": {"key": "MinuteMetrics", "type": "Metrics"}, + "cors": {"key": "Cors", "type": "[CorsRule]", "xml": {"wrapped": True}}, + "default_service_version": {"key": "DefaultServiceVersion", "type": "str"}, + "delete_retention_policy": {"key": "DeleteRetentionPolicy", "type": "RetentionPolicy"}, + "static_website": {"key": "StaticWebsite", "type": "StaticWebsite"}, } def __init__( self, *, - logging: Optional["Logging"] = None, - hour_metrics: Optional["Metrics"] = None, - minute_metrics: Optional["Metrics"] = None, - cors: Optional[List["CorsRule"]] = None, + logging: Optional["_models.Logging"] = None, + hour_metrics: Optional["_models.Metrics"] = None, + minute_metrics: Optional["_models.Metrics"] = None, + cors: Optional[List["_models.CorsRule"]] = None, default_service_version: Optional[str] = None, - delete_retention_policy: Optional["RetentionPolicy"] = None, - static_website: Optional["StaticWebsite"] = None, - **kwargs - ): - super(StorageServiceProperties, self).__init__(**kwargs) + delete_retention_policy: Optional["_models.RetentionPolicy"] = None, + static_website: Optional["_models.StaticWebsite"] = None, + **kwargs: Any + ) -> None: + """ + :keyword logging: Azure Analytics Logging settings. + :paramtype logging: ~azure.storage.blob.models.Logging + :keyword hour_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype hour_metrics: ~azure.storage.blob.models.Metrics + :keyword minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype minute_metrics: ~azure.storage.blob.models.Metrics + :keyword cors: The set of CORS rules. 
+ :paramtype cors: list[~azure.storage.blob.models.CorsRule] + :keyword default_service_version: The default version to use for requests to the Blob service + if an incoming request's version is not specified. Possible values include version 2008-10-27 + and all more recent versions. + :paramtype default_service_version: str + :keyword delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :paramtype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :keyword static_website: The properties that enable an account to host a static website. + :paramtype static_website: ~azure.storage.blob.models.StaticWebsite + """ + super().__init__(**kwargs) self.logging = logging self.hour_metrics = hour_metrics self.minute_metrics = minute_metrics @@ -2210,67 +2672,65 @@ def __init__( self.static_website = static_website -class StorageServiceStats(msrest.serialization.Model): +class StorageServiceStats(_serialization.Model): """Stats for the storage service. - :param geo_replication: Geo-Replication information for the Secondary Storage Service. - :type geo_replication: ~azure.storage.blob.models.GeoReplication + :ivar geo_replication: Geo-Replication information for the Secondary Storage Service. + :vartype geo_replication: ~azure.storage.blob.models.GeoReplication """ _attribute_map = { - 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + "geo_replication": {"key": "GeoReplication", "type": "GeoReplication"}, } - def __init__( - self, - *, - geo_replication: Optional["GeoReplication"] = None, - **kwargs - ): - super(StorageServiceStats, self).__init__(**kwargs) + def __init__(self, *, geo_replication: Optional["_models.GeoReplication"] = None, **kwargs: Any) -> None: + """ + :keyword geo_replication: Geo-Replication information for the Secondary Storage Service. + :paramtype geo_replication: ~azure.storage.blob.models.GeoReplication + """ + super().__init__(**kwargs) self.geo_replication = geo_replication -class UserDelegationKey(msrest.serialization.Model): +class UserDelegationKey(_serialization.Model): """A user delegation key. - All required parameters must be populated in order to send to Azure. - - :param signed_oid: Required. The Azure Active Directory object ID in GUID format. - :type signed_oid: str - :param signed_tid: Required. The Azure Active Directory tenant ID in GUID format. - :type signed_tid: str - :param signed_start: Required. The date-time the key is active. - :type signed_start: ~datetime.datetime - :param signed_expiry: Required. The date-time the key expires. - :type signed_expiry: ~datetime.datetime - :param signed_service: Required. Abbreviation of the Azure Storage service that accepts the - key. - :type signed_service: str - :param signed_version: Required. The service version that created the key. - :type signed_version: str - :param value: Required. The key as a base64 string. - :type value: str + All required parameters must be populated in order to send to server. + + :ivar signed_oid: The Azure Active Directory object ID in GUID format. Required. + :vartype signed_oid: str + :ivar signed_tid: The Azure Active Directory tenant ID in GUID format. Required. + :vartype signed_tid: str + :ivar signed_start: The date-time the key is active. Required. + :vartype signed_start: ~datetime.datetime + :ivar signed_expiry: The date-time the key expires. Required. 
+ :vartype signed_expiry: ~datetime.datetime + :ivar signed_service: Abbreviation of the Azure Storage service that accepts the key. Required. + :vartype signed_service: str + :ivar signed_version: The service version that created the key. Required. + :vartype signed_version: str + :ivar value: The key as a base64 string. Required. + :vartype value: str """ _validation = { - 'signed_oid': {'required': True}, - 'signed_tid': {'required': True}, - 'signed_start': {'required': True}, - 'signed_expiry': {'required': True}, - 'signed_service': {'required': True}, - 'signed_version': {'required': True}, - 'value': {'required': True}, + "signed_oid": {"required": True}, + "signed_tid": {"required": True}, + "signed_start": {"required": True}, + "signed_expiry": {"required": True}, + "signed_service": {"required": True}, + "signed_version": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'signed_oid': {'key': 'SignedOid', 'type': 'str'}, - 'signed_tid': {'key': 'SignedTid', 'type': 'str'}, - 'signed_start': {'key': 'SignedStart', 'type': 'iso-8601'}, - 'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601'}, - 'signed_service': {'key': 'SignedService', 'type': 'str'}, - 'signed_version': {'key': 'SignedVersion', 'type': 'str'}, - 'value': {'key': 'Value', 'type': 'str'}, + "signed_oid": {"key": "SignedOid", "type": "str"}, + "signed_tid": {"key": "SignedTid", "type": "str"}, + "signed_start": {"key": "SignedStart", "type": "iso-8601"}, + "signed_expiry": {"key": "SignedExpiry", "type": "iso-8601"}, + "signed_service": {"key": "SignedService", "type": "str"}, + "signed_version": {"key": "SignedVersion", "type": "str"}, + "value": {"key": "Value", "type": "str"}, } def __init__( @@ -2283,9 +2743,26 @@ def __init__( signed_service: str, signed_version: str, value: str, - **kwargs - ): - super(UserDelegationKey, self).__init__(**kwargs) + **kwargs: Any + ) -> None: + """ + :keyword signed_oid: The Azure Active Directory object ID in GUID format. Required. + :paramtype signed_oid: str + :keyword signed_tid: The Azure Active Directory tenant ID in GUID format. Required. + :paramtype signed_tid: str + :keyword signed_start: The date-time the key is active. Required. + :paramtype signed_start: ~datetime.datetime + :keyword signed_expiry: The date-time the key expires. Required. + :paramtype signed_expiry: ~datetime.datetime + :keyword signed_service: Abbreviation of the Azure Storage service that accepts the key. + Required. + :paramtype signed_service: str + :keyword signed_version: The service version that created the key. Required. + :paramtype signed_version: str + :keyword value: The key as a base64 string. Required. + :paramtype value: str + """ + super().__init__(**kwargs) self.signed_oid = signed_oid self.signed_tid = signed_tid self.signed_start = signed_start diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_patch.py new file mode 100644 index 000000000000..71dde502c70f --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/models/_patch.py @@ -0,0 +1,26 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + + from typing import List +__all__ = [] # type: List[str] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/__init__.py index 62f85c9290c1..1be05c7aa9a7 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/__init__.py @@ -8,18 +8,22 @@ from ._service_operations import ServiceOperations from ._container_operations import ContainerOperations -from ._directory_operations import DirectoryOperations from ._blob_operations import BlobOperations from ._page_blob_operations import PageBlobOperations from ._append_blob_operations import AppendBlobOperations from ._block_blob_operations import BlockBlobOperations +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + __all__ = [ - 'ServiceOperations', - 'ContainerOperations', - 'DirectoryOperations', - 'BlobOperations', - 'PageBlobOperations', - 'AppendBlobOperations', - 'BlockBlobOperations', + "ServiceOperations", + "ContainerOperations", + "BlobOperations", + "PageBlobOperations", + "AppendBlobOperations", + "BlockBlobOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_append_blob_operations.py index abbe40eac438..d07c5c181bf3 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_append_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_append_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,67 +7,462 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. import models as _models +from .._serialization import Serializer -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False -class AppendBlobOperations(object): - """AppendBlobOperations operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +def build_create_request( + url: str, + *, + content_length: int, + timeout: Optional[int] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + 
_headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_append_block_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + lease_id: Optional[str] = None, + max_size: Optional[int] = None, + append_position: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if max_size is not None: + _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int") + if append_position is not None: + _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, 
"str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_append_block_from_url_request( + url: str, + *, + source_url: str, + content_length: int, + source_range: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + max_size: Optional[int] = None, + append_position: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") + if source_range is not None: + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if source_contentcrc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( + "source_contentcrc64", source_contentcrc64, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if max_size is not None: + _headers["x-ms-blob-condition-maxsize"] = _SERIALIZER.header("max_size", max_size, "int") + if append_position is not None: + _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if copy_source_authorization is not None: + 
_headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_seal_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + append_position: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if append_position is not None: + _headers["x-ms-blob-condition-appendpos"] = _SERIALIZER.header("append_position", append_position, "int") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class AppendBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`append_blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def create( + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Create Append Blob operation creates a new append blob. - :param content_length: The length of the request. - :type content_length: long + :param content_length: The length of the request. Required. + :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -74,38 +470,51 @@ def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. 
+ :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -115,6 +524,7 @@ def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -122,363 +532,371 @@ def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "AppendBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + 
encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", 
response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - create.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def append_block( + @distributed_trace + def append_block( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - body, # type: IO - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - transactional_content_crc64=None, # type: Optional[bytearray] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. 
- :type transactional_content_crc64: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param cpk_info: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
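Each operation seeds a status-code-to-exception map and merges any caller-supplied error_map over it, which is how the 304 to ResourceNotModifiedError entry below can be overridden per call. A self-contained sketch of that merge (the subclass is hypothetical):

    from azure.core.exceptions import (
        ClientAuthenticationError,
        ResourceExistsError,
        ResourceNotFoundError,
        ResourceNotModifiedError,
    )

    class MyConflictError(ResourceExistsError):
        """Hypothetical application-specific 409 error."""

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    # Equivalent of passing error_map={409: MyConflictError} to the operation:
    error_map.update({409: MyConflictError})
    assert error_map[409] is MyConflictError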
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _max_size = None _append_position = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "appendblock" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.append_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = 
self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_append_block_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + 
structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + 
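    # Hedged sketch: append_block returns None, so a `cls` callback, invoked as
    # cls(pipeline_response, None, response_headers), is the hook for reading the
    # headers collected here. `client` is a hypothetical, already-authenticated
    # AzureBlobStorage instance.
    import io

    def keep_headers(pipeline_response, deserialized, response_headers):
        return response_headers

    payload = b"more data"
    headers = client.append_blob.append_block(
        content_length=len(payload),
        body=io.BytesIO(payload),
        cls=keep_headers,
    )
    print(headers["x-ms-blob-append-offset"], headers["x-ms-blob-committed-block-count"])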
response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: - return cls(pipeline_response, None, response_headers) - - append_block.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def append_block_from_url( + @distributed_trace + def append_block_from_url( # pylint: disable=inconsistent-return-statements self, - source_url, # type: str - content_length, # type: int - source_range=None, # type: Optional[str] - source_content_md5=None, # type: Optional[bytearray] - source_contentcrc64=None, # type: Optional[bytearray] - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + source_url: str, + content_length: int, + source_range: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob where the contents are read from a source url. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - :param source_url: Specify a URL to the copy source. + :param source_url: Specify a URL to the copy source. Required. :type source_url: str - :param content_length: The length of the request. - :type content_length: long - :param source_range: Bytes of source data in the specified range. + :param content_length: The length of the request. Required. + :type content_length: int + :param source_range: Bytes of source data in the specified range. Default value is None. :type source_range: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray + from the copy source. 
Default value is None. + :type source_content_md5: bytes :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be - read from the copy source. - :type source_contentcrc64: bytearray + read from the copy source. Default value is None. + :type source_contentcrc64: bytes :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + by the service. Default value is None. + :type transactional_content_md5: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :param modified_access_conditions: Parameter group. + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
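copy_source_authorization is the one genuinely new knob here: it forwards an OAuth bearer token for the source blob instead of requiring a SAS on the source URL. A hedged sketch that only builds the request with the module-level builder defined earlier (URLs and token are placeholders):

    request = build_append_block_from_url_request(
        url="https://myaccount.blob.core.windows.net/mycontainer/target",
        source_url="https://source.blob.core.windows.net/mycontainer/source",
        content_length=0,  # from-URL appends carry no request body
        source_range="bytes=0-1048575",
        copy_source_authorization="Bearer <access-token>",
    )
    print(request.headers["x-ms-copy-source"])                # the source URL
    print(request.headers["x-ms-copy-source-authorization"])  # Bearer <access-token>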
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _max_size = None @@ -492,226 +910,207 @@ def append_block_from_url( _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None - if append_position_access_conditions is not None: - _max_size = append_position_access_conditions.max_size - _append_position = append_position_access_conditions.append_position if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + _max_size = append_position_access_conditions.max_size if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "appendblock" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_append_block_from_url_request( + url=self._config.url, + source_url=source_url, + content_length=content_length, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + 
encryption_scope=_encryption_scope, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.append_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _max_size is not None: - header_parameters['x-ms-blob-condition-maxsize'] = self._serialize.header("max_size", _max_size, 'long') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is 
not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-append-offset"] = self._deserialize( + "str", response.headers.get("x-ms-blob-append-offset") + ) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - append_block_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def seal( + @distributed_trace + def seal( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version 2019-12-12 version or later. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param append_position_access_conditions: Parameter group. - :type append_position_access_conditions: ~azure.storage.blob.models.AppendPositionAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _append_position = None - if append_position_access_conditions is not None: - _append_position = append_position_access_conditions.append_position if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match - comp = "seal" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + + _request = build_seal_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + append_position=_append_position, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.seal.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - 
- # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _append_position is not None: - header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", _append_position, 'long') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) if cls: - return cls(pipeline_response, None, response_headers) - - seal.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git 
a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_blob_operations.py index 730a5648e0f7..5da0b2b033a9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,518 +7,2110 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, Iterator, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_download_request( + url: str, + *, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + range_get_content_md5: Optional[bool] = None, + range_get_content_crc64: Optional[bool] = None, + structured_body_type: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if range_get_content_md5 is not None: + _headers["x-ms-range-get-content-md5"] = _SERIALIZER.header( + "range_get_content_md5", range_get_content_md5, "bool" + ) + if range_get_content_crc64 is not None: + _headers["x-ms-range-get-content-crc64"] = _SERIALIZER.header( + "range_get_content_crc64", range_get_content_crc64, "bool" + ) + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 
"rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_properties_request( + url: str, + *, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + 
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + url: str, + *, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_delete_type: Literal["Permanent"] = "Permanent", + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if blob_delete_type is not None: + _params["deletetype"] = _SERIALIZER.query("blob_delete_type", blob_delete_type, "str") + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if delete_snapshots is not None: + _headers["x-ms-delete-snapshots"] = _SERIALIZER.header("delete_snapshots", delete_snapshots, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_undelete_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + version: Literal["2025-01-05"] = 
kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_expiry_request( + url: str, + *, + expiry_options: Union[str, _models.BlobExpiryOptions], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + expires_on: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") + if expires_on is not None: + _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_http_headers_request( + url: str, + *, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: 
Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_immutability_policy_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + 
path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_immutability_policy_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_legal_hold_request( + url: str, + *, + legal_hold: bool, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + 
comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_metadata_request( + url: str, + *, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + 
_headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_acquire_lease_request( + url: str, + *, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if duration is not None: + _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + 
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_release_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_renew_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = 
_headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_change_lease_request( + url: str, + *, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + 
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_break_lease_request( + url: str, + *, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if break_period is not None: + _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_snapshot_request( + url: str, + *, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + 
if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_start_copy_from_url_request( + url: str, + *, + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + 
if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + seal_blob: Optional[bool] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if rehydrate_priority is not None: + _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if source_if_tags is not None: + _headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if seal_blob is not None: + 
_headers["x-ms-seal-blob"] = _SERIALIZER.header("seal_blob", seal_blob, "bool") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_copy_from_url_request( + url: str, + *, + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + encryption_scope: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + x_ms_requires_sync: Literal["true"] = kwargs.pop("x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-requires-sync"] = _SERIALIZER.header("x_ms_requires_sync", x_ms_requires_sync, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if 
source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if copy_source_tags is not None: + _headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_abort_copy_from_url_request( + url: str, + *, + copy_id: str, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) + copy_action_abort_constant: Literal["abort"] = kwargs.pop( + "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") + ) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = 
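# --- Illustrative usage (not part of the generated diff) ---------------------
# Sketch for build_copy_from_url_request above: unlike the asynchronous
# start-copy builder, it pins the constant x-ms-requires-sync: true header,
# and a source_content_md5 of type bytes would be serialized with the
# "bytearray" rule. Import path and URLs are assumptions.
from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated.operations._blob_operations import (
    build_copy_from_url_request,
)

request = build_copy_from_url_request(
    url="https://myaccount.blob.core.windows.net/container/dst",
    copy_source="https://myaccount.blob.core.windows.net/container/src",
)
assert request.headers["x-ms-requires-sync"] == "true"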
_SERIALIZER.query("comp", comp, "str") + _params["copyid"] = _SERIALIZER.query("copy_id", copy_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-copy-action"] = _SERIALIZER.header("copy_action_abort_constant", copy_action_abort_constant, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_tier_request( + url: str, + *, + tier: Union[str, _models.AccessTierRequired], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + request_id_parameter: Optional[str] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if rehydrate_priority is not None: + _headers["x-ms-rehydrate-priority"] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_account_info_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", 
"properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_query_request( + url: str, + *, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if 
if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_tags_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_tags_request( + url: str, + *, + timeout: Optional[int] = None, + version_id: Optional[str] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + if_tags: Optional[str] = None, + lease_id: Optional[str] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = 
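# --- Illustrative usage (not part of the generated diff) ---------------------
# Sketch for the query and get-tags builders above. build_query_request is the
# only POST in this group and forwards `content` (the serialized QueryRequest
# XML) untouched; build_get_tags_request is a plain GET with comp=tags. The
# payload and URLs below are hypothetical placeholders.
from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._generated.operations._blob_operations import (
    build_get_tags_request,
    build_query_request,
)

query = build_query_request(
    url="https://myaccount.blob.core.windows.net/container/blob",
    content=b"<QueryRequest>...</QueryRequest>",  # placeholder body
    content_type="application/xml",
)
assert query.method == "POST" and "comp=query" in query.url

tags = build_get_tags_request(url="https://myaccount.blob.core.windows.net/container/blob")
assert tags.method == "GET" and "comp=tags" in tags.url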
_headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if version_id is not None: + _params["versionid"] = _SERIALIZER.query("version_id", version_id, "str") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +class BlobOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class BlobOperations(object): - """BlobOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace def download( self, - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - range=None, # type: Optional[str] - range_get_content_md5=None, # type: Optional[bool] - range_get_content_crc64=None, # type: Optional[bool] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> IO + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + range_get_content_md5: Optional[bool] = None, + range_get_content_crc64: Optional[bool] = None, + structured_body_type: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long """The Download operation reads or downloads a blob from the system, including its metadata and properties. You can also call Download to read a snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param range_get_content_md5: When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB - in size. + in size. Default value is None. :type range_get_content_md5: bool :param range_get_content_crc64: When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 - MB in size. + MB in size. Default value is None. 
:type range_get_content_crc64: bool - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + :param structured_body_type: Specifies the response content should be returned as a structured + message and specifies the message schema version and properties. Default value is None. + :type structured_body_type: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_download_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, 
+ timeout=timeout, + range=range, + lease_id=_lease_id, + range_get_content_md5=range_get_content_md5, + range_get_content_crc64=range_get_content_crc64, + structured_body_type=structured_body_type, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.download.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if range_get_content_md5 is not None: - header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("range_get_content_md5", range_get_content_md5, 'bool') - if range_get_content_crc64 is not None: - header_parameters['x-ms-range-get-content-crc64'] = self._serialize.header("range_get_content_crc64", range_get_content_crc64, 'bool') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - 
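# --- Illustrative sketch (not part of the generated diff) --------------------
# The regenerated download() above replaces hand-rolled URL/query/header
# assembly with a single builder call followed by a streamed pipeline run. The
# helper below mirrors that flow for a BlobOperations instance `op`, leaning on
# the same internal attributes the generated code uses (build_download_request
# is defined earlier in this generated module); it is for illustration only,
# with deliberately simplified error handling.
def stream_blob(op):
    _request = build_download_request(url=op._config.url, version=op._config.version)
    _request.url = op._client.format_url(_request.url)
    pipeline_response = op._client._pipeline.run(_request, stream=True)
    response = pipeline_response.http_response
    # 200 = full blob, 206 = ranged read; anything else is mapped to an error.
    if response.status_code not in (200, 206):
        raise RuntimeError(f"unexpected status {response.status_code}")
    return response.stream_download(op._client._pipeline, decompress=True)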
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} if response.status_code == 200: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + 
response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - 
response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', 
response.headers.get('x-ms-blob-content-md5')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["x-ms-structured-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-structured-content-length") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - download.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - def get_properties( + @distributed_trace + def get_properties( # pylint: disable=inconsistent-return-statements self, - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_properties_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', 
minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) - response_headers['x-ms-or']=self._deserialize('str', response.headers.get('x-ms-or')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', 
response.headers.get('x-ms-copy-status')) - response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy')) - response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier')) - response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred')) - response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status')) - response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) - response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) - response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time')) - response_headers['x-ms-blob-sealed']=self._deserialize('bool', 
response.headers.get('x-ms-blob-sealed')) - response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority')) - response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["x-ms-or-policy-id"] = self._deserialize("str", response.headers.get("x-ms-or-policy-id")) + response_headers["x-ms-or"] = self._deserialize("{str}", response.headers.get("x-ms-or")) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress")) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-incremental-copy"] = self._deserialize( + "bool", response.headers.get("x-ms-incremental-copy") + ) + response_headers["x-ms-copy-destination-snapshot"] = self._deserialize( + "str", response.headers.get("x-ms-copy-destination-snapshot") + ) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier")) + response_headers["x-ms-access-tier-inferred"] = self._deserialize( + "bool", response.headers.get("x-ms-access-tier-inferred") + ) + response_headers["x-ms-archive-status"] = self._deserialize("str", response.headers.get("x-ms-archive-status")) + response_headers["x-ms-access-tier-change-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-access-tier-change-time") + ) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["x-ms-is-current-version"] = self._deserialize( + "bool", response.headers.get("x-ms-is-current-version") + ) + response_headers["x-ms-tag-count"] = self._deserialize("int", response.headers.get("x-ms-tag-count")) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) + response_headers["x-ms-blob-sealed"] = self._deserialize("bool", response.headers.get("x-ms-blob-sealed")) + response_headers["x-ms-rehydrate-priority"] = self._deserialize( + "str", response.headers.get("x-ms-rehydrate-priority") + ) + response_headers["x-ms-last-access-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-last-access-time") + ) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) - - get_properties.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def delete( + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements self, - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - delete_snapshots=None, # type: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] - request_id_parameter=None, # type: Optional[str] - blob_delete_type="Permanent", # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, + request_id_parameter: Optional[str] = None, + blob_delete_type: Literal["Permanent"] = "Permanent", + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is permanently removed from the storage account. If the storage account's soft delete feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible immediately. However, the blob service retains the blob or snapshot for the number of days - specified by the DeleteRetentionPolicy section of [Storage service properties] (Set-Blob- - Service-Properties.md). After the specified number of days has passed, the blob's data is - permanently removed from the storage account. Note that you continue to be charged for the - soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify - the "include=deleted" query parameter to discover which blobs and snapshots have been soft - deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the "include=deleted" query parameter to discover which blobs and snapshots have been + soft deleted. You can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 (ResourceNotFound). :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the following two options: include: Delete the base blob and all of its snapshots. only: Delete - only the blob's snapshots and not the blob itself. + only the blob's snapshots and not the blob itself. Known values are: "include" and "only". + Default value is None. :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. 
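# --- Sketch (editorial, not part of the patch): the rewritten call pattern ----
# Each operation now follows one shape instead of hand-building URLs, query
# parameters, and headers inline: a module-level build_*_request helper
# produces the request, the pipeline runs it, and failures flow through a
# typed error_map plus failsafe_deserialize. A condensed, self-contained
# sketch of the error half; the names mirror the code above, nothing here is
# new API.
from typing import MutableMapping, Type
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    map_error,
)

error_map: MutableMapping[int, Type[HttpResponseError]] = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
    # New in this rewrite: ETag/If-None-Match short-circuits surface as
    # ResourceNotModifiedError instead of a bare HttpResponseError.
    304: ResourceNotModifiedError,
}

# On a non-success status the operations do, in effect:
#   map_error(status_code=response.status_code, response=response, error_map=error_map)
#   error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
#   raise HttpResponseError(response=response, model=error)
# failsafe_deserialize returns None rather than raising when the error body
# itself cannot be parsed, so the original HTTP failure is never masked.
# ------------------------------------------------------------------------------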
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to - permanently delete a blob if blob soft delete is enabled. + permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and + None. Default value is "Permanent". :type blob_delete_type: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -527,800 +2120,634 @@ def delete( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_delete_request( + url=self._config.url, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + delete_snapshots=delete_snapshots, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_delete_type=blob_delete_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: 
Dict[str, Any] - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if blob_delete_type is not None: - query_parameters['deletetype'] = self._serialize.query("blob_delete_type", blob_delete_type, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if delete_snapshots is not None: - header_parameters['x-ms-delete-snapshots'] = self._serialize.header("delete_snapshots", delete_snapshots, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_access_control( - self, - timeout=None, # type: Optional[int] - 
owner=None, # type: Optional[str] - group=None, # type: Optional[str] - posix_permissions=None, # type: Optional[str] - posix_acl=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Set the owner, group, permissions, or access control list for a blob. + @distributed_trace + def undelete( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """Undelete a blob that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param owner: Optional. The owner of the blob or directory. - :type owner: str - :param group: Optional. The owning group of the blob or directory. - :type group: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". - :type posix_acl: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
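# --- Sketch (editorial, not part of the patch): conditional-access groups -----
# The If-Match / If-None-Match / If-(Un)Modified-Since / x-ms-if-tags headers
# assembled by hand in the removed code travel in a ModifiedAccessConditions
# parameter group in the new code. A sketch of an ETag-guarded call, assuming
# the vendored generated models; the import path and etag are illustrative.
from azure.storage.blob._generated import models as _models

conditions = _models.ModifiedAccessConditions(if_match='"0x8DEXAMPLEETAG"')
# client.blob.delete(modified_access_conditions=conditions)  # hypothetical call;
# the service answers 412 (Precondition Failed) if the blob's current ETag no
# longer matches.
# ------------------------------------------------------------------------------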
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "setAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_undelete_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if owner is not None: - header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str') - if group is not None: - header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_acl is not None: - header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.patch(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def get_access_control( + @distributed_trace + def set_expiry( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - upn=None, # type: Optional[bool] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Get the owner, group, permissions, or access control list for a blob. + expiry_options: Union[str, _models.BlobExpiryOptions], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + expires_on: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """Sets the time a blob will expire and be deleted. + :param expiry_options: Required. Indicates mode of the expiry time. Known values are: + "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. + :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. 
For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If - "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. - :type upn: bool :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param expires_on: The time to set the blob to expiry. Default value is None. + :type expires_on: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "getAccessControl" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_expiry_request( + url=self._config.url, + expiry_options=expiry_options, + timeout=timeout, + request_id_parameter=request_id_parameter, + expires_on=expires_on, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", 
self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if upn is not None: - query_parameters['upn'] = self._serialize.query("upn", upn, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner')) - response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group')) - response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions')) - response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - get_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - def rename( + @distributed_trace + def set_http_headers( # pylint: disable=inconsistent-return-statements self, - rename_source, # type: str - timeout=None, # type: Optional[int] - path_rename_mode=None, # type: Optional[Union[str, "_models.PathRenameMode"]] - directory_properties=None, # type: Optional[str] - posix_permissions=None, # type: Optional[str] - posix_umask=None, # type: Optional[str] - source_lease_id=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - directory_http_headers=None, # type: Optional["_models.DirectoryHttpHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Rename a blob/file. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param rename_source: The file or directory to be renamed. The value must have the following - format: "/{filesysystem}/{path}". If "x-ms-properties" is specified, the properties will - overwrite the existing properties; otherwise, the existing properties will be preserved. - :type rename_source: str + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """The Set HTTP Headers operation sets system properties on the blob. + :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param path_rename_mode: Determines the behavior of the rename operation. - :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. 
This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. - :type posix_umask: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. - :type source_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param directory_http_headers: Parameter group. - :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _blob_cache_control = None + _blob_content_type = None + _blob_content_md5 = None + _blob_content_encoding = None + _blob_content_language = None _lease_id = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding 
= directory_http_headers.content_encoding
-            _content_language = directory_http_headers.content_language
-            _content_disposition = directory_http_headers.content_disposition
+        _if_tags = None
+        _blob_content_disposition = None
+        if blob_http_headers is not None:
+            _blob_cache_control = blob_http_headers.blob_cache_control
+            _blob_content_disposition = blob_http_headers.blob_content_disposition
+            _blob_content_encoding = blob_http_headers.blob_content_encoding
+            _blob_content_language = blob_http_headers.blob_content_language
+            _blob_content_md5 = blob_http_headers.blob_content_md5
+            _blob_content_type = blob_http_headers.blob_content_type
         if lease_access_conditions is not None:
             _lease_id = lease_access_conditions.lease_id
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
-        if source_modified_access_conditions is not None:
-            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
-            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
-            _source_if_match = source_modified_access_conditions.source_if_match
-            _source_if_none_match = source_modified_access_conditions.source_if_none_match
-        accept = "application/xml"
+            _if_tags = modified_access_conditions.if_tags
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_set_http_headers_request(
+            url=self._config.url,
+            timeout=timeout,
+            blob_cache_control=_blob_cache_control,
+            blob_content_type=_blob_content_type,
+            blob_content_md5=_blob_content_md5,
+            blob_content_encoding=_blob_content_encoding,
+            blob_content_language=_blob_content_language,
+            lease_id=_lease_id,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            blob_content_disposition=_blob_content_disposition,
+            request_id_parameter=request_id_parameter,
+            comp=comp,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )

-        # Construct URL
-        url = self.rename.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-        if path_rename_mode is not None:
-            query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str')
-        if directory_properties is not None:
-            header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str')
-        if posix_permissions is not None:
-            header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str')
-        if posix_umask is not None:
-            header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str')
-        if _cache_control is not None:
-            header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
-        if _content_type is not None:
-            header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
-        if _content_encoding is not None:
-            header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
-        if _content_language is not None:
-            header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
-        if _content_disposition is not None:
-            header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if source_lease_id is not None:
-            header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _source_if_modified_since is not None:
-            header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
-        if _source_if_unmodified_since is not None:
-            header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
-        if _source_if_match is not None:
-            header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
-        if _source_if_none_match is not None:
-            header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

-        if response.status_code not in [201]:
+        if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.DataLakeStorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    rename.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    def undelete(
+    @distributed_trace
+    def set_immutability_policy(  # pylint: disable=inconsistent-return-statements
         self,
-        timeout=None,  # type: Optional[int]
-        request_id_parameter=None,  # type: Optional[str]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
-        """Undelete a blob that was previously soft deleted.
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        immutability_policy_expiry: Optional[datetime.datetime] = None,
+        immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+        snapshot: Optional[str] = None,
+        version_id: Optional[str] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
+        """The Set Immutability Policy operation sets the immutability policy on the blob.

         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy
+         is set to expire. Default value is None.
+        :type immutability_policy_expiry: ~datetime.datetime
+        :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+         Known values are: "Mutable", "Unlocked", and "Locked". Default value is None.
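# A minimal usage sketch for the regenerated Set HTTP Headers operation above,
# going through the public azure-storage-blob surface that this vendored copy
# mirrors; the connection string, container, and blob names are placeholders:
from azure.storage.blob import BlobClient, ContentSettings

blob = BlobClient.from_connection_string("<connection-string>", "container", "blob")
# ContentSettings maps onto the x-ms-blob-cache-control / -content-type /
# -content-disposition headers that the request builder serializes above.
blob.set_http_headers(
    content_settings=ContentSettings(
        content_type="application/json",
        cache_control="no-cache",
        content_disposition="inline",
    )
)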
+ :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.undelete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + if_unmodified_since=_if_unmodified_since, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-immutability-policy-until-date"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-immutability-policy-until-date") + ) + response_headers["x-ms-immutability-policy-mode"] = self._deserialize( + "str", response.headers.get("x-ms-immutability-policy-mode") + ) if cls: - return cls(pipeline_response, None, response_headers) - - undelete.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_expiry( + @distributed_trace + def delete_immutability_policy( # pylint: disable=inconsistent-return-statements self, - expiry_options, # type: Union[str, "_models.BlobExpiryOptions"] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - expires_on=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None - """Sets the time a blob will expire and be deleted. + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """The Delete Immutability Policy operation deletes the immutability policy on the blob. - :param expiry_options: Required. Indicates mode of the expiry time. - :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param expires_on: The time to set the blob to expiry. 
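# The new set_immutability_policy operation corresponds to
# BlobClient.set_immutability_policy in the mirrored public API. A minimal
# sketch, assuming a storage account with version-level immutability enabled
# and placeholder names:
import datetime
from azure.storage.blob import BlobClient, ImmutabilityPolicy

blob = BlobClient.from_connection_string("<connection-string>", "container", "blob")
policy = ImmutabilityPolicy(
    expiry_time=datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=1),
    policy_mode="Unlocked",  # "Unlocked" policies can still be changed; "Locked" cannot
)
blob.set_immutability_policy(policy)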
- :type expires_on: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - comp = "expiry" - accept = "application/xml" - - # Construct URL - url = self.set_expiry.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str') - if expires_on is not None: - header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_delete_immutability_policy_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if 
response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_expiry.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_http_headers( + @distributed_trace + def set_legal_hold( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """The Set HTTP Headers operation sets system properties on the blob. - + legal_hold: bool, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """The Set Legal Hold operation sets a legal hold on the blob. + + :param legal_hold: Specified if a legal hold should be set on the blob. Required. + :type legal_hold: bool :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_http_headers: Parameter group. - :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
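# delete_immutability_policy is the counterpart to the operation above; note
# that the regenerated error map now also routes 304 to
# ResourceNotModifiedError for all of these operations. A minimal sketch with
# placeholder names (the service only permits deletion while the policy is
# still unlocked):
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "blob")
blob.delete_immutability_policy()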
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - - _blob_cache_control = None - _blob_content_type = None - _blob_content_md5 = None - _blob_content_encoding = None - _blob_content_language = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _if_tags = None - _blob_content_disposition = None - if blob_http_headers is not None: - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type - _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_encoding = blob_http_headers.blob_content_encoding - _blob_content_language = blob_http_headers.blob_content_language - _blob_content_disposition = blob_http_headers.blob_content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_set_legal_hold_request( + url=self._config.url, + legal_hold=legal_hold, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_http_headers.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", 
self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-legal-hold")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - set_http_headers.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def set_metadata( + @distributed_trace + def set_metadata( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more name-value pairs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -1328,687 +2755,663 @@ def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. 
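# set_legal_hold toggles the x-ms-legal-hold flag that the response handling
# above deserializes back out. A minimal sketch via the mirrored public API,
# placeholder names as before:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "blob")
blob.set_legal_hold(True)   # place the hold; writes and deletes are blocked
blob.set_legal_hold(False)  # clear it again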
+ :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "metadata" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_set_metadata_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, 
+ encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, 
pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - set_metadata.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def acquire_lease( + @distributed_trace + def acquire_lease( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - duration=None, # type: Optional[int] - proposed_lease_id=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. 
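# At the generated layer, metadata is now typed as Dict[str, str] rather than a
# pre-joined header string, and the encryption algorithm is read from CpkInfo
# instead of being a loose parameter. A minimal public-surface sketch with
# placeholder names:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "blob")
blob.set_blob_metadata(metadata={"owner": "consumer-group-1", "kind": "checkpoint"})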
Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. + duration cannot be changed using renew or change. Default value is None. :type duration: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Default value is None. :type proposed_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "acquire" - accept = "application/xml" - - # Construct URL - url = self.acquire_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # 
Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if duration is not None: - header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') - if proposed_lease_id is not None: - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def release_lease( + @distributed_trace + def release_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", 
request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def renew_lease( + @distributed_trace + def renew_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. 
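# release_lease frees the lock immediately instead of waiting for expiry; the
# current lease ID must be presented. A sketch continuing the pattern above,
# placeholder names:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string("<connection-string>", "container", "blob")
lease = blob.acquire_lease(lease_duration=15)
try:
    blob.upload_blob(b"checkpoint-data", overwrite=True, lease=lease)
finally:
    lease.release()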
- :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = 
self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_renew_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - renew_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def change_lease( + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - proposed_lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Required. :type proposed_lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
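A renewed lease keeps its existing ID and restarts the duration clock; the service will also renew an expired lease as long as the blob has not been leased again in the meantime. A minimal sketch, continuing the blob object from the sketch above:

    # Sketch only: renew resets the duration on the same lease ID and
    # fails with 409 once another client has taken the lease.
    lease = blob.acquire_lease(lease_duration=15)
    lease.renew()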
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "change" - accept = "application/xml" - - # Construct URL - url = self.change_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not 
None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_change_lease_request( + url=self._config.url, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - change_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def break_lease( + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - break_period=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + timeout: Optional[int] = None, + break_period: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a fixed- - duration lease breaks after the remaining lease period elapses, and an infinite lease breaks - immediately. + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. :type break_period: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
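The change operation regenerated just above rotates an active lease to a caller-proposed GUID, which lets ownership be handed off without releasing the lock in between; the service answers 400 (Invalid request) if the proposed ID is not a valid GUID. A minimal sketch, reusing the lease object from the sketches above:

    # Sketch only: the proposed ID is generated locally.
    import uuid

    lease.change(proposed_lease_id=str(uuid.uuid4()))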
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "lease" - action = "break" - accept = "application/xml" - - # Construct URL - url = self.break_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if break_period is not None: - header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = 
self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_break_lease_request( + url=self._config.url, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - break_lease.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def create_snapshot( + @distributed_trace + def create_snapshot( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: 
Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Create Snapshot operation creates a read-only snapshot of a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2016,36 +3419,41 @@ def create_snapshot( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
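Two signature changes in this create_snapshot hunk are worth registering: metadata is retyped from a pre-serialized str to dict[str, str], and encryption_algorithm moves out of the signature into the CpkInfo parameter group. Neither is visible from the public surface; a minimal sketch, assuming the blob client from the first sketch and an illustrative metadata pair:

    # Sketch only: a snapshot is read-only and identified by the
    # timestamp the service returns.
    snap = blob.create_snapshot(metadata={"origin": "example"})
    print(snap["snapshot"])  # snapshot timestamp string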
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -2054,114 +3462,103 @@ def create_snapshot( _if_tags = None _lease_id = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "snapshot" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_create_snapshot_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create_snapshot.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if 
timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-request-server-encrypted')) + response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) if cls: - return cls(pipeline_response, None, response_headers) - - create_snapshot.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def start_copy_from_url( + @distributed_trace + def start_copy_from_url( # pylint: disable=inconsistent-return-statements self, - copy_source, # type: str - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] - rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - seal_blob=None, # type: Optional[bool] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + seal_blob: Optional[bool] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Start Copy From URL operation copies a blob or an internet resource to a new blob. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. 
For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2169,38 +3566,57 @@ def start_copy_from_url( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. + blob. Known values are: "High" and "Standard". Default value is None. :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str :param seal_blob: Overrides the sealed state of the destination blob. Service version - 2019-12-12 and newer. + 2019-12-12 and newer. Default value is None. :type seal_blob: bool - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None.
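start_copy_from_url schedules a server-side copy and returns 202 before any bytes move, so callers poll the destination's copy properties for completion (or cancel through the abort operation regenerated later in this file). A minimal sketch with a placeholder SAS-authenticated source URL:

    # Sketch only: the returned dict carries the copy ID and initial status.
    src_url = "https://example.blob.core.windows.net/src/source-blob?<sas>"
    copy = blob.start_copy_from_url(src_url)
    print(copy["copy_status"])  # usually 'pending' at first
    props = blob.get_blob_properties()
    print(props.copy.status, props.copy.id)  # poll until 'success'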
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + _source_if_modified_since = None _source_if_unmodified_since = None _source_if_match = None @@ -2212,125 +3628,113 @@ def start_copy_from_url( _if_none_match = None _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_tags = modified_access_conditions.if_tags if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if modified_access_conditions is not None: + _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_start_copy_from_url_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + rehydrate_priority=rehydrate_priority, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + seal_blob=seal_blob, + immutability_policy_expiry=immutability_policy_expiry, + 
immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.start_copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _source_if_tags is not None: - header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - if seal_blob is not None: - header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", 
seal_blob, 'bool') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def copy_from_url( + @distributed_trace + def copy_from_url( # pylint: disable=inconsistent-return-statements self, - copy_source, # type: str - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] - request_id_parameter=None, # type: Optional[str] - source_content_md5=None, # type: Optional[bytearray] - blob_tags_string=None, # type: Optional[str] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return a response until the copy is complete. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -2338,35 +3742,66 @@ def copy_from_url( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + from the copy source. Default value is None. + :type source_content_md5: bytes + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. 
:type blob_tags_string: str - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :param modified_access_conditions: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and + "COPY". Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param cpk_scope_info: Parameter group. Default value is None.
+ :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + x_ms_requires_sync: Literal["true"] = kwargs.pop( + "x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + _source_if_modified_since = None _source_if_unmodified_since = None _source_if_match = None @@ -2377,787 +3812,829 @@ def copy_from_url( _if_none_match = None _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id + _encryption_scope = None + if source_modified_access_conditions is not None: + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - x_ms_requires_sync = "true" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + + _request = build_copy_from_url_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, 
+ legal_hold=legal_hold, + copy_source_authorization=copy_source_authorization, + encryption_scope=_encryption_scope, + copy_source_tags=copy_source_tags, + x_ms_requires_sync=x_ms_requires_sync, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-requires-sync'] = self._serialize.header("x_ms_requires_sync", x_ms_requires_sync, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def abort_copy_from_url( + @distributed_trace + def abort_copy_from_url( # pylint: disable=inconsistent-return-statements self, - copy_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: 
Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + copy_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination blob with zero length and full metadata. :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy - Blob operation. + Blob operation. Required. :type copy_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy")) + copy_action_abort_constant: Literal["abort"] = kwargs.pop( + "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - comp = "copy" - copy_action_abort_constant = "abort" - accept = "application/xml" - - # Construct URL - url = self.abort_copy_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['copyid'] = self._serialize.query("copy_id", copy_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-copy-action'] = 
self._serialize.header("copy_action_abort_constant", copy_action_abort_constant, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_abort_copy_from_url_request( + url=self._config.url, + copy_id=copy_id, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + comp=comp, + copy_action_abort_constant=copy_action_abort_constant, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_tier( + @distributed_trace + def set_tier( # pylint: disable=inconsistent-return-statements self, - tier, # type: Union[str, "_models.AccessTierRequired"] - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - timeout=None, # type: Optional[int] - rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None + tier: Union[str, _models.AccessTierRequired], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + rehydrate_priority: Optional[Union[str, _models.RehydratePriority]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium storage account and on a block blob in a blob storage account (locally redundant storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. - :param tier: Indicates the tier to be set on the blob. + :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". + Required. :type tier: str or ~azure.storage.blob.models.AccessTierRequired :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived - blob. + blob. Known values are: "High" and "Standard". Default value is None. :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tier" - accept = "application/xml" - # Construct URL - url = self.set_tier.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if rehydrate_priority is not None: - header_parameters['x-ms-rehydrate-priority'] = self._serialize.header("rehydrate_priority", rehydrate_priority, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_set_tier_request( + url=self._config.url, + tier=tier, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_tags=_if_tags, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + 
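# [Editor's aside - not part of the patch] A minimal sketch of the request-builder
# pattern this hunk migrates to: the removed manual URL/query/header serialization
# now lives in module-level build_*_request helpers, which (in the upstream
# generator, assumed here) return an azure.core.rest.HttpRequest that the caller
# formats and hands to the pipeline. The account URL and version are placeholders:
#
#     request = build_set_tier_request(
#         url="https://myaccount.blob.core.windows.net/container/blob",  # placeholder
#         tier="Cool",
#         comp="tier",
#         version="2021-12-02",  # self._config.version in the generated code
#     )
#     request.url = self._client.format_url(request.url)
#     pipeline_response = self._client._pipeline.run(request, stream=False)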
_request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - if response.status_code == 200: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - - if response.status_code == 202: - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) - - set_tier.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def get_account_info( - self, - **kwargs # type: Any - ): - # type: (...) -> None + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+        :type request_id_parameter: str
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
-        }
-        error_map.update(kwargs.pop('error_map', {}))
-        restype = "account"
-        comp = "properties"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.get_account_info.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account"))
+        comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
+        _request = build_get_account_info_request(
+            url=self._config.url,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter,
+            restype=restype,
+            comp=comp,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response
         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)
         response_headers = {}
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name'))
-        response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind'))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+
"str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def query( self, - snapshot=None, # type: Optional[str] - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - query_request=None, # type: Optional["_models.QueryRequest"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> IO + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + query_request: Optional[_models.QueryRequest] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long """The Query operation enables users to select/project on blob data by providing simple query expressions. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param query_request: the query request. - :type query_request: ~azure.storage.blob.models.QueryRequest - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. 
:type cpk_info: ~azure.storage.blob.models.CpkInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :param query_request: the query request. Default value is None. + :type query_request: ~azure.storage.blob.models.QueryRequest + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "query" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.query.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - 
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + _if_unmodified_since = modified_access_conditions.if_unmodified_since if query_request is not None: - body_content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + _content = self._serialize.body(query_request, "QueryRequest", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + _content = None + + _request = build_query_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 206]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} 
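# [Editor's aside - not part of the patch] Two behavior changes land in the error
# path above. With stream=True the body is lazy, so it is drained via
# response.read() (ignoring StreamConsumedError/StreamClosedError) before the
# status is mapped. And failsafe_deserialize replaces the old _deserialize call:
# it returns None instead of raising when the error body is not a parseable
# StorageError document, roughly:
#
#     try:
#         error = self._deserialize(_models.StorageError, pipeline_response)
#     except Exception:  # malformed or empty error body
#         error = None
#     raise HttpResponseError(response=response, model=error)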
if response.status_code == 200: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = 
response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + 
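# [Editor's aside - not part of the patch] Note the deserializer type tokens
# changing in the header parsing above: 'x-ms-meta' moves from "str" to "{str}"
# (a str->str mapping, so metadata comes back as a dict rather than one string),
# and the old "long" tokens become "int", presumably since the Python 2
# int/long distinction is gone.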
response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) if response.status_code == 206: - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) - response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) - response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) - response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) - response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) - response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) - response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - 
response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) - response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize( + "str", response.headers.get("Content-Disposition") + ) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-blob-type"] = self._deserialize("str", response.headers.get("x-ms-blob-type")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-copy-completion-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-copy-completion-time") + ) + response_headers["x-ms-copy-status-description"] = self._deserialize( + "str", response.headers.get("x-ms-copy-status-description") + ) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-progress"] = self._deserialize( + "str", response.headers.get("x-ms-copy-progress") + ) + response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) + response_headers["x-ms-lease-duration"] = self._deserialize( + "str", response.headers.get("x-ms-lease-duration") + ) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-committed-block-count"] = self._deserialize( + "int", response.headers.get("x-ms-blob-committed-block-count") + ) + response_headers["x-ms-server-encrypted"] 
= self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-blob-content-md5"] = self._deserialize( + "bytearray", response.headers.get("x-ms-blob-content-md5") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - query.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore + @distributed_trace def get_tags( self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - snapshot=None, # type: Optional[str] - version_id=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> "_models.BlobTags" + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> _models.BlobTags: + # pylint: disable=line-too-long """The Get Tags operation enables users to get the tags associated with a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlobTags, or the result of cls(response) + :return: BlobTags or the result of cls(response) :rtype: ~azure.storage.blob.models.BlobTags - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + cls: ClsType[_models.BlobTags] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - accept = "application/xml" + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + _request = build_get_tags_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response 
= self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlobTags', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlobTags", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - def set_tags( + @distributed_trace + def set_tags( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - version_id=None, # type: Optional[str] - transactional_content_md5=None, # type: Optional[bytearray] - transactional_content_crc64=None, # type: Optional[bytearray] - request_id_parameter=None, # type: Optional[str] - tags=None, # type: Optional["_models.BlobTags"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + version_id: Optional[str] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + tags: Optional[_models.BlobTags] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Set Tags operation enables users to set tags on a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. :type version_id: str :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. 
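For orientation: the regenerated `get_tags` above now maps well-known status codes (including the newly added 304) to typed `azure.core` exceptions and uses `failsafe_deserialize` so a malformed error body cannot mask the underlying HTTP failure. A minimal sketch of that error-mapping pattern, using only public `azure.core` APIs (the `response` argument is a hypothetical stand-in for any pipeline HTTP response):

```python
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    map_error,
)

# The same mapping the regenerated operations build, including the new 304 entry.
ERROR_MAP = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
    304: ResourceNotModifiedError,
}

def raise_for_status(response, expected=(200,)):
    """`response` is any pipeline HTTP response object (hypothetical stand-in)."""
    if response.status_code not in expected:
        # Raises the mapped azure.core exception when the status code matches...
        map_error(status_code=response.status_code, response=response, error_map=ERROR_MAP)
        # ...and falls through to the generic error otherwise.
        raise HttpResponseError(response=response)
```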
Default value is None. + :type transactional_content_md5: bytes :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. - :type transactional_content_crc64: bytearray + validated by the service. Default value is None. + :type transactional_content_crc64: bytes :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param tags: Blob tags. - :type tags: ~azure.storage.blob.models.BlobTags - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param tags: Blob tags. Default value is None. + :type tags: ~azure.storage.blob.models.BlobTags + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_tags = None _lease_id = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "tags" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_tags.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if version_id is not None: - query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - 
if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if tags is not None: - body_content = self._serialize.body(tags, 'BlobTags', is_xml=True) + _content = self._serialize.body(tags, "BlobTags", is_xml=True) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_tags_request( + url=self._config.url, + timeout=timeout, + version_id=version_id, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + request_id_parameter=request_id_parameter, + if_tags=_if_tags, + lease_id=_lease_id, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_tags.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return 
cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_block_blob_operations.py index 7bb13abc2b7f..b0c20b665f1e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_block_blob_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_block_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,120 +7,792 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
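The `get_tags`/`set_tags` pair refactored above is vendored for the checkpoint store's internal use; application code would normally reach the same service operations through the public azure-storage-blob client. A minimal usage sketch, assuming the public package and placeholder connection details:

```python
from azure.storage.blob import BlobClient

# Placeholder connection string and names -- substitute real values.
blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="checkpoints", blob_name="ownership"
)

# set_blob_tags/get_blob_tags are the public wrappers over the generated
# set_tags/get_tags operations shown in this hunk.
blob.set_blob_tags({"owner": "consumer-1", "state": "active"})
tags = blob.get_blob_tags()  # {'owner': 'consumer-1', 'state': 'active'}
```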
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_upload_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[bytes] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = 
_SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, 
"str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_put_blob_from_url_request( + url: str, + *, + content_length: int, + copy_source: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + 
_headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if source_if_tags is not None: + _headers["x-ms-source-if-tags"] = _SERIALIZER.header("source_if_tags", source_if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str") + if copy_source_blob_properties is not None: + _headers["x-ms-copy-source-blob-properties"] = _SERIALIZER.header( + "copy_source_blob_properties", copy_source_blob_properties, "bool" + ) + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + if copy_source_tags is not None: + 
_headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_stage_block_request( + url: str, + *, + block_id: str, + content_length: int, + content: IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + 
"structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_stage_block_from_url_request( + url: str, + *, + block_id: str, + content_length: int, + source_url: str, + source_range: Optional[str] = None, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["blockid"] = _SERIALIZER.query("block_id", block_id, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") + if source_range is not None: + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if source_contentcrc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( + "source_contentcrc64", source_contentcrc64, "bytearray" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", 
source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_commit_block_list_request( + url: str, + *, + content: Any, + timeout: Optional[int] = None, + blob_cache_control: Optional[str] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", blob_cache_control, "str") + if blob_content_type is not 
None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_block_list_request( + url: str, + *, + snapshot: Optional[str] = None, + list_type: Union[str, _models.BlockListType] = "committed", + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + _params["blocklisttype"] = _SERIALIZER.query("list_type", list_type, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class BlockBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class BlockBlobOperations(object): - """BlockBlobOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`block_blob` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def upload( + @distributed_trace + def upload( # pylint: disable=inconsistent-return-statements self, - content_length, # type: int - body, # type: IO - timeout=None, # type: Optional[int] - transactional_content_md5=None, # type: Optional[bytearray] - metadata=None, # type: Optional[str] - encryption_algorithm="AES256", # type: Optional[str] - tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] - request_id_parameter=None, # type: Optional[str] - blob_tags_string=None, # type: Optional[str] - blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytes] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, _models.AccessTierOptional]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + transactional_content_crc64: Optional[bytes] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Upload Block Blob operation updates the content of an existing block blob. Updating an existing block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a partial update of the content of a block blob, use the Put Block List operation. - :param content_length: The length of the request. - :type content_length: long - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int :param transactional_content_md5: Specify the transactional md5 for the body, to be validated - by the service. - :type transactional_content_md5: bytearray + by the service. Default value is None. + :type transactional_content_md5: bytes :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str - :param tier: Optional. Indicates the tier to be set on the blob. + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytes + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. 
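As this docstring says, Put Blob replaces the whole blob; composing content incrementally goes through Put Block / Put Block List (the `build_stage_block_request` and `build_commit_block_list_request` helpers above). Through the public azure-storage-blob client that flow might look like this (connection details are placeholders):

```python
import base64
from azure.storage.blob import BlobBlock, BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="checkpoints", blob_name="blocks.bin"
)

# Block IDs must be base64-encoded strings of equal length within a blob.
ids = [base64.b64encode(f"block-{i:04d}".encode()).decode() for i in range(2)]
blob.stage_block(block_id=ids[0], data=b"first half,")   # Put Block
blob.stage_block(block_id=ids[1], data=b" second half")  # Put Block
blob.commit_block_list([BlobBlock(block_id=b) for b in ids])  # Put Block List
```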
+ :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -129,6 +802,7 @@ def upload( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -136,210 +810,215 @@ def upload( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = 
modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "BlockBlob" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.upload.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = 
self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = body + + _request = build_upload_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + transactional_content_crc64=transactional_content_crc64, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + blob_type=blob_type, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - 
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )
+        response_headers["x-ms-structured-body"] = self._deserialize(
+            "str", response.headers.get("x-ms-structured-body")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    upload.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
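Note: the regenerated `upload` no longer returns a deserialized model; a caller that wants the raw headers (including the new `x-ms-structured-body`) passes the optional `cls` callback, which receives `(pipeline_response, deserialized, response_headers)` exactly as the `return cls(...)` line above shows. A minimal sketch, assuming a hypothetical `ops` handle to a BlockBlobOperations instance (the vendored BlobClient wires this up internally) and an arbitrary `data` bytes payload:

    import io

    def keep_headers(pipeline_response, deserialized, headers):
        # For upload, `deserialized` is None; `headers` carries ETag,
        # x-ms-version-id, x-ms-structured-body, etc.
        return headers

    headers = ops.upload(body=io.BytesIO(data), content_length=len(data), cls=keep_headers)
    etag = headers["ETag"]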
-    def put_blob_from_url(
+    @distributed_trace
+    def put_blob_from_url(  # pylint: disable=inconsistent-return-statements
         self,
-        content_length,  # type: int
-        copy_source,  # type: str
-        timeout=None,  # type: Optional[int]
-        transactional_content_md5=None,  # type: Optional[bytearray]
-        metadata=None,  # type: Optional[str]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        tier=None,  # type: Optional[Union[str, "_models.AccessTierOptional"]]
-        request_id_parameter=None,  # type: Optional[str]
-        source_content_md5=None,  # type: Optional[bytearray]
-        blob_tags_string=None,  # type: Optional[str]
-        copy_source_blob_properties=None,  # type: Optional[bool]
-        blob_http_headers=None,  # type: Optional["_models.BlobHTTPHeaders"]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        source_modified_access_conditions=None,  # type: Optional["_models.SourceModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        content_length: int,
+        copy_source: str,
+        timeout: Optional[int] = None,
+        transactional_content_md5: Optional[bytes] = None,
+        metadata: Optional[Dict[str, str]] = None,
+        tier: Optional[Union[str, _models.AccessTierOptional]] = None,
+        request_id_parameter: Optional[str] = None,
+        source_content_md5: Optional[bytes] = None,
+        blob_tags_string: Optional[str] = None,
+        copy_source_blob_properties: Optional[bool] = None,
+        copy_source_authorization: Optional[str] = None,
+        copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None,
+        blob_http_headers: Optional[_models.BlobHTTPHeaders] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are
         read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial
         updates are not supported with Put Blob from URL; the content of an existing blob is
         overwritten with the content of the new blob. To perform partial updates to a block blob’s
         contents using a source URL, use the Put Block from URL API in conjunction with Put Block
         List.

-        :param content_length: The length of the request.
-        :type content_length: long
+        :param content_length: The length of the request. Required.
+        :type content_length: int
         :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL
         of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as
         it would appear in a request URI. The source blob must either be public or must be authenticated
-        via a shared access signature.
+        via a shared access signature. Required.
         :type copy_source: str
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param transactional_content_md5: Specify the transactional md5 for the body, to be validated
-        by the service.
-        :type transactional_content_md5: bytearray
+        by the service. Default value is None.
+        :type transactional_content_md5: bytes
         :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob.
         If no name-value pairs are specified, the operation will copy the metadata from the source
         blob or file to the destination blob. If one or more name-value pairs are specified, the
         destination blob is created with the specified metadata, and metadata is not copied from the
         source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere
         to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and
         Metadata for more
-        information.
-        :type metadata: str
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-        the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-        provided.
-        :type encryption_algorithm: str
-        :param tier: Optional. Indicates the tier to be set on the blob.
+        information. Default value is None.
+ :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and + "Cold". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read - from the copy source. - :type source_content_md5: bytearray - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + from the copy source. Default value is None. + :type source_content_md5: bytes + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str :param copy_source_blob_properties: Optional, default is true. Indicates if properties from - the source blob should be copied. + the source blob should be copied. Default value is None. :type copy_source_blob_properties: bool - :param blob_http_headers: Parameter group. + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and + "COPY". Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. - :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param source_modified_access_conditions: Parameter group. Default value is None. 
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -349,6 +1028,7 @@ def put_blob_from_url( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -361,345 +1041,344 @@ def put_blob_from_url( _source_if_none_match = None _source_if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags + _if_unmodified_since = modified_access_conditions.if_unmodified_since if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags - blob_type = "BlockBlob" - accept = 
"application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_put_blob_from_url_request( + url=self._config.url, + content_length=content_length, + copy_source=copy_source, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.put_blob_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - 
header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - if _source_if_tags is not None: - header_parameters['x-ms-source-if-tags'] = self._serialize.header("source_if_tags", _source_if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if blob_tags_string is not None: - header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - if copy_source_blob_properties is not None: - header_parameters['x-ms-copy-source-blob-properties'] = self._serialize.header("copy_source_blob_properties", copy_source_blob_properties, 'bool') - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    put_blob_from_url.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
-
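As with `upload`, the inline URL/query/header assembly is gone: each operation now builds an `azure.core.rest.HttpRequest` through a module-level `build_*_request` helper and hands it to the pipeline. A rough sketch of the shape of such a helper; the name and parameter list below are illustrative, not the actual generated signature:

    from azure.core.rest import HttpRequest

    def build_example_put_request(url, *, content_length, version, timeout=None, headers=None, params=None):
        headers = headers or {}
        params = params or {}
        if timeout is not None:
            params["timeout"] = timeout          # query string, was query_parameters[...]
        headers["Content-Length"] = str(content_length)
        headers["x-ms-version"] = version        # was header_parameters['x-ms-version']
        headers["Accept"] = "application/xml"
        return HttpRequest(method="PUT", url=url, params=params, headers=headers)

Keeping per-operation serialization out of the method body is what lets the new code pass the `_headers`/`_params` dicts popped from `kwargs` straight through to the builder.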
-    def stage_block(
+    @distributed_trace
+    def stage_block(  # pylint: disable=inconsistent-return-statements
         self,
-        block_id,  # type: str
-        content_length,  # type: int
-        body,  # type: IO
-        transactional_content_md5=None,  # type: Optional[bytearray]
-        transactional_content_crc64=None,  # type: Optional[bytearray]
-        timeout=None,  # type: Optional[int]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        request_id_parameter=None,  # type: Optional[str]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        block_id: str,
+        content_length: int,
+        body: IO[bytes],
+        transactional_content_md5: Optional[bytes] = None,
+        transactional_content_crc64: Optional[bytes] = None,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        structured_body_type: Optional[str] = None,
+        structured_content_length: Optional[int] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """The Stage Block operation creates a new block to be committed as part of a blob.

         :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the
         string must be less than or equal to 64 bytes in size. For a given blob, the length of the
-        value specified for the blockid parameter must be the same size for each block.
+        value specified for the blockid parameter must be the same size for each block. Required.
         :type block_id: str
-        :param content_length: The length of the request.
-        :type content_length: long
-        :param body: Initial data.
-        :type body: IO
+        :param content_length: The length of the request. Required.
+        :type content_length: int
+        :param body: Initial data. Required.
+        :type body: IO[bytes]
         :param transactional_content_md5: Specify the transactional md5 for the body, to be validated
-        by the service.
-        :type transactional_content_md5: bytearray
+        by the service. Default value is None.
+        :type transactional_content_md5: bytes
         :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
-        validated by the service.
-        :type transactional_content_crc64: bytearray
+        validated by the service. Default value is None.
+        :type transactional_content_crc64: bytes
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-        the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-        provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-        limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
         :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param structured_body_type: Required if the request body is a structured message. Specifies
+        the message schema version and properties. Default value is None.
+        :type structured_body_type: str
+        :param structured_content_length: Required if the request body is a structured message.
+        Specifies the length of the blob/file content inside the message body. Will always be smaller
+        than Content-Length. Default value is None.
+ :type structured_content_length: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - comp = "block" - content_type = kwargs.pop("content_type", "application/octet-stream") - accept = "application/xml" - - # Construct URL - url = self.stage_block.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if _lease_id is not None: - 
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content_kwargs['stream_content'] = body - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _content = body + + _request = build_stage_block_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + request_id_parameter=request_id_parameter, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )
+        response_headers["x-ms-structured-body"] = self._deserialize(
+            "str", response.headers.get("x-ms-structured-body")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    stage_block.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
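The docstring requirement that every block id be a Base64 value of identical encoded length is easy to trip over when generating ids ad hoc. One conventional scheme is zero-padded counters encoded to Base64; this sketch is illustrative and not taken from the patch:

    import base64

    def make_block_id(index: int, width: int = 6) -> str:
        # Zero-padding before encoding keeps every id the same encoded length.
        return base64.b64encode(str(index).zfill(width).encode("utf-8")).decode("utf-8")

    block_ids = [make_block_id(i) for i in range(3)]  # e.g. 'MDAwMDAw', 'MDAwMDAx', ...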
-    def stage_block_from_url(
+    @distributed_trace
+    def stage_block_from_url(  # pylint: disable=inconsistent-return-statements
         self,
-        block_id,  # type: str
-        content_length,  # type: int
-        source_url,  # type: str
-        source_range=None,  # type: Optional[str]
-        source_content_md5=None,  # type: Optional[bytearray]
-        source_contentcrc64=None,  # type: Optional[bytearray]
-        timeout=None,  # type: Optional[int]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        request_id_parameter=None,  # type: Optional[str]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        source_modified_access_conditions=None,  # type: Optional["_models.SourceModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        block_id: str,
+        content_length: int,
+        source_url: str,
+        source_range: Optional[str] = None,
+        source_content_md5: Optional[bytes] = None,
+        source_contentcrc64: Optional[bytes] = None,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        copy_source_authorization: Optional[str] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """The Stage Block operation creates a new block to be committed as part of a blob where the
         contents are read from a URL.

         :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the
         string must be less than or equal to 64 bytes in size. For a given blob, the length of the
-        value specified for the blockid parameter must be the same size for each block.
+        value specified for the blockid parameter must be the same size for each block. Required.
         :type block_id: str
-        :param content_length: The length of the request.
-        :type content_length: long
-        :param source_url: Specify a URL to the copy source.
+        :param content_length: The length of the request. Required.
+        :type content_length: int
+        :param source_url: Specify a URL to the copy source. Required.
         :type source_url: str
-        :param source_range: Bytes of source data in the specified range.
+        :param source_range: Bytes of source data in the specified range. Default value is None.
         :type source_range: str
         :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read
-        from the copy source.
-        :type source_content_md5: bytearray
+        from the copy source. Default value is None.
+        :type source_content_md5: bytes
         :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be
-        read from the copy source.
-        :type source_contentcrc64: bytearray
+        read from the copy source. Default value is None.
+        :type source_contentcrc64: bytes
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-        the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-        provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-        limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
         :type request_id_parameter: str
-        :param cpk_info: Parameter group.
+        :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
+        OAuth access token to copy source. Default value is None.
+        :type copy_source_authorization: str
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param source_modified_access_conditions: Parameter group.
-        :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :param source_modified_access_conditions: Parameter group. Default value is None.
+ :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) + cls: ClsType[None] = kwargs.pop("cls", None) + _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _lease_id = None _source_if_modified_since = None @@ -707,6 +1386,7 @@ def stage_block_from_url( _source_if_match = None _source_if_none_match = None if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: @@ -714,104 +1394,96 @@ def stage_block_from_url( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since _source_if_match = source_modified_access_conditions.source_if_match + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match - comp = "block" - accept = "application/xml" + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + + _request = build_stage_block_from_url_request( + url=self._config.url, + block_id=block_id, + content_length=content_length, + source_url=source_url, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.stage_block_from_url.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['blockid'] = self._serialize.query("block_id", block_id, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str') - if source_range is not None: - header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str') - if source_content_md5 is not None: - header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray') - if source_contentcrc64 is not None: - header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-MD5']=self._deserialize('bytearray', 
response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
-
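`stage_block_from_url` now surfaces `copy_source_authorization`, so the source can be read with an OAuth bearer token instead of requiring a public blob or a SAS. A sketch of a call using it; the endpoint, range, and token are placeholders, `ops` is the hypothetical operations handle from the earlier note, and `content_length` is 0 because the body is fetched from the source URL rather than sent in the request:

    ops.stage_block_from_url(
        block_id=make_block_id(0),
        content_length=0,
        source_url="https://account.blob.core.windows.net/src/source-blob",
        source_range="bytes=0-1048575",
        copy_source_authorization="Bearer <access-token>",
    )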
-    def commit_block_list(
+    @distributed_trace
+    def commit_block_list(  # pylint: disable=inconsistent-return-statements
         self,
-        blocks,  # type: "_models.BlockLookupList"
-        timeout=None,  # type: Optional[int]
-        transactional_content_md5=None,  # type: Optional[bytearray]
-        transactional_content_crc64=None,  # type: Optional[bytearray]
-        metadata=None,  # type: Optional[str]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        tier=None,  # type: Optional[Union[str, "_models.AccessTierOptional"]]
-        request_id_parameter=None,  # type: Optional[str]
-        blob_tags_string=None,  # type: Optional[str]
-        blob_http_headers=None,  # type: Optional["_models.BlobHTTPHeaders"]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        blocks: _models.BlockLookupList,
+        timeout: Optional[int] = None,
+        transactional_content_md5: Optional[bytes] = None,
+        transactional_content_crc64: Optional[bytes] = None,
+        metadata: Optional[Dict[str, str]] = None,
+        tier: Optional[Union[str, _models.AccessTierOptional]] = None,
+        request_id_parameter: Optional[str] = None,
+        blob_tags_string: Optional[str] = None,
+        immutability_policy_expiry: Optional[datetime.datetime] = None,
+        immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+        legal_hold: Optional[bool] = None,
+        blob_http_headers: Optional[_models.BlobHTTPHeaders] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """The Commit Block List operation writes a blob by specifying the list of block IDs that make
         up the blob. In order to be written as part of a blob, a block must have been successfully
         written to the server in a prior Put Block operation. You can call Put Block List to update a
         blob by
@@ -820,58 +1492,75 @@ def commit_block_list(
         or from the uncommitted block list, or to commit the most recently uploaded version of the
         block, whichever list it may belong to.

-        :param blocks:
+        :param blocks: Blob Blocks. Required.
         :type blocks: ~azure.storage.blob.models.BlockLookupList
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param transactional_content_md5: Specify the transactional md5 for the body, to be validated
-        by the service.
-        :type transactional_content_md5: bytearray
+        by the service. Default value is None.
+        :type transactional_content_md5: bytes
         :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
-        validated by the service.
-        :type transactional_content_crc64: bytearray
+        validated by the service. Default value is None.
+        :type transactional_content_crc64: bytes
         :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob.
         If no name-value pairs are specified, the operation will copy the metadata from the source
         blob or file to the destination blob. If one or more name-value pairs are specified, the
         destination blob is created with the specified metadata, and metadata is not copied from the
         source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere
         to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and
         Metadata for more
-        information.
-        :type metadata: str
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-        the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-        provided.
-        :type encryption_algorithm: str
-        :param tier: Optional. Indicates the tier to be set on the blob.
+        information. Default value is None.
+        :type metadata: dict[str, str]
+        :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6",
+        "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and
+        "Cold". Default value is None.
:type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param blob_tags_string: Optional. Used to set blob tags in various blob operations. + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. :type blob_tags_string: str - :param blob_http_headers: Parameter group. + :param immutability_policy_expiry: Specifies the date and time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Known values are: "Mutable", "Unlocked", and "Locked". Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None.
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_cache_control = None _blob_content_type = None _blob_content_encoding = None @@ -881,6 +1570,7 @@ def commit_block_list( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -889,225 +1579,210 @@ def commit_block_list( _if_tags = None if blob_http_headers is not None: _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_type = blob_http_headers.blob_content_type + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.commit_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - 
if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if transactional_content_md5 is not None: - header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray') - if transactional_content_crc64 is not None: - header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if blob_tags_string is not None: - 
header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _content = self._serialize.body(blocks, "BlockLookupList", is_xml=True) + + _request = build_commit_block_list_request( + url=self._config.url, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) - response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - 
response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) - response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) - response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) if cls: - return cls(pipeline_response, None, response_headers) - - commit_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def get_block_list( self, - snapshot=None, # type: Optional[str] - list_type="committed", # type: Union[str, "_models.BlockListType"] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> "_models.BlockList" + snapshot: Optional[str] = None, + list_type: Union[str, _models.BlockListType] = "committed", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.BlockList: + # pylint: disable=line-too-long """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param list_type: Specifies whether to return the list of committed blocks, the list of - uncommitted blocks, or both lists together. + uncommitted blocks, or both lists together. Known values are: "committed", "uncommitted", and + "all". Default value is "committed". 
:type list_type: str or ~azure.storage.blob.models.BlockListType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: BlockList, or the result of cls(response) + :return: BlockList or the result of cls(response) :rtype: ~azure.storage.blob.models.BlockList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BlockList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) + cls: ClsType[_models.BlockList] = kwargs.pop("cls", None) + _lease_id = None _if_tags = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_tags = modified_access_conditions.if_tags - comp = "blocklist" - accept = "application/xml" - # Construct URL - url = self.get_block_list.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - query_parameters['blocklisttype'] = self._serialize.query("list_type", list_type, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = 
self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_block_list_request( + url=self._config.url, + snapshot=snapshot, + list_type=list_type, + timeout=timeout, + lease_id=_lease_id, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('BlockList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("BlockList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_block_list.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore diff --git 
a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_container_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_container_operations.py index 41a1c8aa2daf..e68bf2cdb49a 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_container_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_container_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,60 +7,921 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Iterator, List, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_create_request( + url: str, + *, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + request_id_parameter: Optional[str] = None, + default_encryption_scope: Optional[str] = None, + prevent_encryption_scope_override: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if access is not None: + _headers["x-ms-blob-public-access"] = _SERIALIZER.header("access", access, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if default_encryption_scope is not None: + _headers["x-ms-default-encryption-scope"] = _SERIALIZER.header( + "default_encryption_scope", default_encryption_scope, "str" + ) + if prevent_encryption_scope_override is not None: + _headers["x-ms-deny-encryption-scope-override"] = _SERIALIZER.header( + "prevent_encryption_scope_override", prevent_encryption_scope_override, "bool" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_properties_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is 
not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_metadata_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + if_modified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + 
_params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_access_policy_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_set_access_policy_request( + url: str, + *, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) 
+ accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if access is not None: + _headers["x-ms-blob-public-access"] = _SERIALIZER.header("access", access, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_restore_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if deleted_container_name is not None: + _headers["x-ms-deleted-container-name"] = _SERIALIZER.header( + "deleted_container_name", deleted_container_name, "str" + ) + if deleted_container_version is not None: + _headers["x-ms-deleted-container-version"] = _SERIALIZER.header( + "deleted_container_version", deleted_container_version, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + 
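All of the build_*_request helpers added in this hunk follow one recipe: fold caller-supplied headers and query parameters in case-insensitively, pin the operation's restype/comp literals and service version, then return an azure.core.rest.HttpRequest for the operation method to run through the client pipeline. A minimal sketch of that recipe, assuming only azure-core; the name build_example_request and the hard-coded version below are illustrative stand-ins, not part of the patch:

# Illustrative sketch only: helper name and pinned version are hypothetical.
from typing import Any, Optional

from azure.core.rest import HttpRequest
from azure.core.utils import case_insensitive_dict


def build_example_request(url: str, *, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest:
    # Caller-supplied headers/params are merged case-insensitively, as in the builders above.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Fixed query parameters select the sub-resource; optional ones are guarded.
    _params["restype"] = "container"
    if timeout is not None:
        _params["timeout"] = timeout

    # Every request pins the service version and an XML Accept header.
    _headers["x-ms-version"] = "2025-01-05"
    _headers["Accept"] = "application/xml"

    return HttpRequest(method="GET", url=url, params=_params, headers=_headers, **kwargs)

Keeping the builders as module-level functions rather than methods lets the sync and async operation classes share them, which is why the regenerated aio operations import these same helpers instead of constructing URLs, query parameters, and headers inline.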
+def build_rename_request( + url: str, + *, + source_container_name: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + source_lease_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["x-ms-source-container-name"] = _SERIALIZER.header("source_container_name", source_container_name, "str") + if source_lease_id is not None: + _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_submit_batch_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if multipart_content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("multipart_content_type", multipart_content_type, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = 
_SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_filter_blobs_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if where is not None: + _params["where"] = _SERIALIZER.query("where", where, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_acquire_lease_request( + url: str, + *, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = 
_SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if duration is not None: + _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_release_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_renew_lease_request( + url: str, + *, + lease_id: str, + timeout: Optional[int] = None, + 
if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_break_lease_request( + url: str, + *, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + if 
break_period is not None: + _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_change_lease_request( + url: str, + *, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str") + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_blob_flat_segment_request( + url: str, + *, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long + url: str, + *, + delimiter: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + _params["delimiter"] = _SERIALIZER.query("delimiter", delimiter, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = 
_SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_account_info_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class ContainerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ContainerOperations(object): - """ContainerOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`container` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def create( + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] - request_id_parameter=None, # type: Optional[str] - container_cpk_scope_info=None, # type: Optional["_models.ContainerCpkScopeInfo"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + request_id_parameter: Optional[str] = None, + container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """creates a new container under the specified account. If the container with the same name already exists, the operation fails. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -67,209 +929,227 @@ def create( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. :type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_cpk_scope_info: Parameter group. + :param container_cpk_scope_info: Parameter group. Default value is None. 
:type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _default_encryption_scope = None _prevent_encryption_scope_override = None if container_cpk_scope_info is not None: _default_encryption_scope = container_cpk_scope_info.default_encryption_scope _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override - restype = "container" - accept = "application/xml" - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if _default_encryption_scope is not None: - header_parameters['x-ms-default-encryption-scope'] = self._serialize.header("default_encryption_scope", _default_encryption_scope, 'str') - if _prevent_encryption_scope_override is not None: - header_parameters['x-ms-deny-encryption-scope-override'] = self._serialize.header("prevent_encryption_scope_override", _prevent_encryption_scope_override, 'bool') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_create_request( + url=self._config.url, + timeout=timeout, + metadata=metadata, + access=access, + request_id_parameter=request_id_parameter, + default_encryption_scope=_default_encryption_scope, + prevent_encryption_scope_override=_prevent_encryption_scope_override, + restype=restype, + 
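+            # restype is a fixed query parameter ("container") and version feeds the
+            # x-ms-version header; both are forwarded to the request builder as-is.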
version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - create.metadata = {'url': '/{containerName}'} # type: ignore - - def get_properties( + @distributed_trace + def get_properties( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. 
:type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - accept = "application/xml" - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-meta']=self._deserialize('str', response.headers.get('x-ms-meta')) - response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) - response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) - response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) - response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) - response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) - response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["x-ms-has-immutability-policy"] = self._deserialize( + "bool", response.headers.get("x-ms-has-immutability-policy") + ) + response_headers["x-ms-has-legal-hold"] = self._deserialize("bool", response.headers.get("x-ms-has-legal-hold")) + response_headers["x-ms-default-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-default-encryption-scope") + ) + response_headers["x-ms-deny-encryption-scope-override"] = self._deserialize( + "bool", response.headers.get("x-ms-deny-encryption-scope-override") + ) + response_headers["x-ms-immutable-storage-with-versioning-enabled"] = self._deserialize( + "bool", response.headers.get("x-ms-immutable-storage-with-versioning-enabled") + ) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - 
get_properties.metadata = {'url': '/{containerName}'} # type: ignore - - def delete( + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """operation marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -278,70 +1158,61 @@ def delete( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - accept = "application/xml" - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _request = build_delete_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, 
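+            # stream=False: the response body is read eagerly; Delete returns headers only.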
stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def set_metadata( + @distributed_trace + def set_metadata( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - metadata=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """operation sets one or more user-defined name-value pairs for the specified container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob @@ -349,212 +1220,226 @@ def set_metadata( blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more - information. - :type metadata: str + information. Default value is None. + :type metadata: dict[str, str] :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since - restype = "container" - comp = "metadata" - accept = "application/xml" - - # Construct URL - url = self.set_metadata.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_set_metadata_request( + url=self._config.url, + 
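+            # _lease_id and _if_modified_since were unpacked above from the optional
+            # lease_access_conditions / modified_access_conditions parameter groups.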
timeout=timeout, + lease_id=_lease_id, + metadata=metadata, + if_modified_since=_if_modified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_metadata.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def get_access_policy( self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> List["_models.SignedIdentifier"] + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + **kwargs: Any + ) -> List[_models.SignedIdentifier]: + # pylint: disable=line-too-long """gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: list of SignedIdentifier, or the result of cls(response) + :return: list of SignedIdentifier or the result of cls(response) :rtype: list[~azure.storage.blob.models.SignedIdentifier] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + _lease_id = None if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id - restype = "container" - comp = "acl" - accept = "application/xml" - - # Construct URL - url = self.get_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_get_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in 
[200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + response_headers["x-ms-blob-public-access"] = self._deserialize( + "str", response.headers.get("x-ms-blob-public-access") + ) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - def set_access_policy( + @distributed_trace + def set_access_policy( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] - request_id_parameter=None, # type: Optional[str] - container_acl=None, # type: Optional[List["_models.SignedIdentifier"]] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + access: Optional[Union[str, _models.PublicAccessType]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + container_acl: Optional[List[_models.SignedIdentifier]] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """sets the permissions for the specified container. The permissions indicate whether blobs in a container may be accessed publicly. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. 
+ :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param access: Specifies whether data in the container may be accessed publicly and the level - of access. + of access. Known values are: "container" and "blob". Default value is None. :type access: str or ~azure.storage.blob.models.PublicAccessType :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param container_acl: the acls for the container. - :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param container_acl: the acls for the container. Default value is None. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -563,920 +1448,1199 @@ def set_access_policy( if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - restype = "container" - comp = "acl" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_access_policy.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if 
timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if access is not None: - header_parameters['x-ms-blob-public-access'] = self._serialize.header("access", access, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} if container_acl is not None: - body_content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + _content = self._serialize.body( + container_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt + ) else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + _content = None + + _request = build_set_access_policy_request( + url=self._config.url, + timeout=timeout, + lease_id=_lease_id, + access=access, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - set_access_policy.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def restore( + @distributed_trace + def restore( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - deleted_container_name=None, # type: Optional[str] - deleted_container_version=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Restores a previously-deleted container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of - the deleted container to restore. + the deleted container to restore. Default value is None. :type deleted_container_name: str :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the - version of the deleted container to restore. + version of the deleted container to restore. Default value is None. 
:type deleted_container_version: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "undelete" - accept = "application/xml" - - # Construct URL - url = self.restore.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - if deleted_container_name is not None: - header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str') - if deleted_container_version is not None: - header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_restore_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def rename( # pylint: disable=inconsistent-return-statements + self, + source_container_name: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + source_lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long + """Renames an existing container. + + :param source_container_name: Specifies the name of the container to rename. + Required. + :type source_container_name: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_lease_id: A lease ID for the source path. If specified, the source path must have + an active lease and the lease ID must match. Default value is None.
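As a usage sketch (an editor's illustration, not part of this patch): the regenerated `restore` operation can be driven directly from the generated operations class. Here `ops` stands for a hypothetical `ContainerOperations` instance, reachable for example as `ContainerClient._client.container` in the vendored package, and the name and version values are placeholders.

    # Restore a soft-deleted container. Both values would normally come from a
    # prior list-containers call that includes deleted containers; the literals
    # below are placeholders, not values taken from this patch.
    ops.restore(
        deleted_container_name="checkpoints",
        deleted_container_version="01D60F8BB59A4652",
        timeout=30,
    )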
+ :type source_lease_id: str + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_rename_request( + url=self._config.url, + source_container_name=source_container_name, + timeout=timeout, + request_id_parameter=request_id_parameter, + source_lease_id=source_lease_id, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def submit_batch( + self, + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
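A hedged sketch of the new `rename` operation, using the same hypothetical `ops` instance: the container named in the client's URL is the rename target, while `source_container_name` is the existing container being renamed.

    # Rename "old-checkpoints" to the container addressed by the client URL.
    # source_lease_id is only required when the source holds an active lease.
    ops.rename(
        source_container_name="old-checkpoints",
        source_lease_id=None,
    )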
+ :type request_id_parameter: str + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - restore.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - def acquire_lease( + @distributed_trace + def filter_blobs( self, - timeout=None, # type: Optional[int] - duration=None, # type: Optional[int] - proposed_lease_id=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long + """The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. 
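For `submit_batch`, a minimal sketch under the same `ops` assumption. In practice the multipart/mixed body is assembled by the SDK's batch helpers, so the raw payload below is only a stand-in.

    import io

    batch_body = io.BytesIO(b"<multipart/mixed payload>")  # placeholder body
    chunks = ops.submit_batch(
        content_length=batch_body.getbuffer().nbytes,
        body=batch_body,
    )
    raw_response = b"".join(chunks)  # the operation streams Iterator[bytes]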
Filter blobs searches within the given container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None.
+ :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease - duration cannot be changed using renew or change. + duration cannot be changed using renew or change. Default value is None. 
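A paging sketch for `filter_blobs`, again assuming the hypothetical `ops` instance: the `NextMarker` of each returned `FilterBlobSegment` feeds the `marker` of the next call. The tag expression is illustrative.

    marker = None
    while True:
        segment = ops.filter_blobs(where="Status = 'processed'", marker=marker, maxresults=100)
        for blob in segment.blobs:
            print(blob.name)
        marker = segment.next_marker
        if not marker:  # an empty or None NextMarker ends the listing
            break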
:type duration: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Default value is None. :type proposed_lease_id: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "acquire" - accept = "application/xml" - - # Construct URL - url = self.acquire_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if duration is not None: - header_parameters['x-ms-lease-duration'] = self._serialize.header("duration", duration, 'int') - if proposed_lease_id is not None: - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') 
- if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_acquire_lease_request( + url=self._config.url, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - acquire_lease.metadata = {'url': '/{containerName}'} # type: ignore - - def release_lease( + @distributed_trace + def release_lease( # pylint: 
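Because the regenerated `acquire_lease` returns `None`, the new lease ID has to be read from the `x-ms-lease-id` response header; the `cls` hook shown in the operation can surface it. A sketch, assuming the same hypothetical `ops` instance:

    def _lease_id(pipeline_response, deserialized, response_headers):
        # cls receives the response-headers dict built by the operation
        return response_headers.get("x-ms-lease-id")

    lease_id = ops.acquire_lease(duration=15, cls=_lease_id)  # 15-60s, or -1 for infinite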
disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "release" - accept = "application/xml" - - # Construct URL - url = self.release_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] 
- query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_release_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return 
cls(pipeline_response, None, response_headers) - - release_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def renew_lease( + @distributed_trace + def renew_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
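A sketch of a conditional `release_lease` call, assuming `lease_id` was obtained as in the acquire sketch above and that `_models` is this vendored `_generated.models` package:

    import datetime

    # Release only if the container is unchanged since a known point in time.
    conditions = _models.ModifiedAccessConditions(
        if_unmodified_since=datetime.datetime(2024, 12, 1, tzinfo=datetime.timezone.utc)
    )
    ops.release_lease(lease_id=lease_id, modified_access_conditions=conditions)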
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "renew" - accept = "application/xml" - - # Construct URL - url = self.renew_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_renew_lease_request( + url=self._config.url, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + 
request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - renew_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def break_lease( + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements self, - timeout=None, # type: Optional[int] - break_period=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + timeout: Optional[int] = None, + break_period: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
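A keep-alive sketch for `renew_lease`; the interval is illustrative and simply stays under the 15-second minimum lease duration.

    import time

    for _ in range(3):
        ops.renew_lease(lease_id=lease_id)  # lease_id from the earlier acquire sketch
        time.sleep(10)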
:type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for - longer than the break period. If this header does not appear with a break operation, a fixed- - duration lease breaks after the remaining lease period elapses, and an infinite lease breaks - immediately. + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. :type break_period: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "break" - accept = "application/xml" - - # Construct URL - url = self.break_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", 
timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - if break_period is not None: - header_parameters['x-ms-lease-break-period'] = self._serialize.header("break_period", break_period, 'int') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_break_lease_request( + url=self._config.url, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - break_lease.metadata = {'url': '/{containerName}'} # type: ignore - - def change_lease( + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements self, - lease_id, # type: str - proposed_lease_id, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. - :param lease_id: Specifies the current lease ID on the resource. + :param lease_id: Specifies the current lease ID on the resource. Required. :type lease_id: str :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. + Constructor (String) for a list of valid GUID string formats. Required. :type proposed_lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None if modified_access_conditions is not None: _if_modified_since = modified_access_conditions.if_modified_since _if_unmodified_since = modified_access_conditions.if_unmodified_since - comp = "lease" - restype = "container" - action = "change" - accept = "application/xml" - - # Construct URL - url = self.change_lease.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-lease-action'] = self._serialize.header("action", action, 'str') - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str') - header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + _request = build_change_lease_request( + url=self._config.url, + 
lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - change_lease.metadata = {'url': '/{containerName}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def list_blob_flat_segment( self, - prefix=None, # type: Optional[str] - marker=None, # type: Optional[str] - maxresults=None, # type: Optional[int] - include=None, # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.ListBlobsFlatSegmentResponse" + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> _models.ListBlobsFlatSegmentResponse: + # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. 
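A sketch of `change_lease`, swapping the active lease ID for a caller-proposed GUID:

    import uuid

    new_id = str(uuid.uuid4())  # must be a valid GUID string
    ops.change_lease(lease_id=lease_id, proposed_lease_id=new_id)
    lease_id = new_id  # subsequent lease operations must use the new ID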
+        specified prefix. Default value is None.
         :type prefix: str
         :param marker: A string value that identifies the portion of the list of containers to be
         returned with the next listing operation. The operation returns the NextMarker value within
         the response body if the listing operation did not return all containers remaining to be
         listed with the current page. The NextMarker value can be used as the value for the marker
         parameter in a subsequent call to request the next page of list items. The marker value is
         opaque to the
-        client.
+        client. Default value is None.
         :type marker: str
         :param maxresults: Specifies the maximum number of containers to return. If the request does
         not specify maxresults, or specifies a value greater than 5000, the server will return up to
         5000 items. Note that if the listing operation crosses a partition boundary, then the service
         will return a continuation token for retrieving the remainder of the results. For this
         reason, it is possible that the service will return fewer results than specified by
         maxresults, or than
-        the default of 5000.
+        the default of 5000. Default value is None.
         :type maxresults: int
         :param include: Include this parameter to specify one or more datasets to include in the
-        response.
+        response. Default value is None.
         :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem]
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-        :code:`Setting Timeouts for Blob Service Operations.`.
+        :code:`Setting
+        Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-        limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
         :type request_id_parameter: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: ListBlobsFlatSegmentResponse, or the result of cls(response)
+        :return: ListBlobsFlatSegmentResponse or the result of cls(response)
         :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListBlobsFlatSegmentResponse"]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
        }
-        error_map.update(kwargs.pop('error_map', {}))
-        restype = "container"
-        comp = "list"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.list_blob_flat_segment.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if prefix is not None:
-            query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str')
-        if marker is not None:
-            query_parameters['marker'] = self._serialize.query("marker", marker, 'str')
-        if maxresults is not None:
-            query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1)
-        if include is not None:
-            query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
+        comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
+        cls: ClsType[_models.ListBlobsFlatSegmentResponse] = kwargs.pop("cls", None)
+
+        _request = build_list_blob_flat_segment_request(
+            url=self._config.url,
+            prefix=prefix,
+            marker=marker,
+            maxresults=maxresults,
+            include=include,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter,
+            restype=restype,
+            comp=comp,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response)
+        response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+        deserialized = self._deserialize("ListBlobsFlatSegmentResponse", pipeline_response.http_response)

         if cls:
-            return cls(pipeline_response, deserialized, response_headers)
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

-        return deserialized
-    list_blob_flat_segment.metadata = {'url': '/{containerName}'}  # type: ignore
+        return deserialized  # type: ignore

+    @distributed_trace
     def list_blob_hierarchy_segment(
         self,
-        delimiter,  # type: str
-        prefix=None,  # type: Optional[str]
-        marker=None,  # type: Optional[str]
-        maxresults=None,  # type: Optional[int]
-        include=None,  # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]]
-        timeout=None,  # type: Optional[int]
-        request_id_parameter=None,  # type: Optional[str]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> "_models.ListBlobsHierarchySegmentResponse"
+        delimiter: str,
+        prefix: Optional[str] = None,
+        marker: Optional[str] = None,
+        maxresults: Optional[int] = None,
+        include: Optional[List[Union[str, _models.ListBlobsIncludeItem]]] = None,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> _models.ListBlobsHierarchySegmentResponse:
+        # pylint: disable=line-too-long
         """[Update] The List Blobs operation returns a list of the blobs under the specified
         container.

         :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix
         element in the response body that acts as a placeholder for all blobs whose names begin with
         the same substring up to the appearance of the delimiter character. The delimiter may be a
-        single character or a string.
+        single character or a string. Required.
         :type delimiter: str
         :param prefix: Filters the results to return only containers whose name begins with the
-        specified prefix.
+        specified prefix. Default value is None.
         :type prefix: str
         :param marker: A string value that identifies the portion of the list of containers to be
         returned with the next listing operation.
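For reference, a minimal sketch of the NextMarker continuation pattern these listing docstrings describe, assuming `ops` is a generated ContainerOperations instance and using the attribute names of the generated models (the client name and page size are illustrative):

    # Page through a flat blob listing until the service stops returning a marker.
    segment = ops.list_blob_flat_segment(maxresults=100)
    while True:
        for blob in segment.segment.blob_items:
            print(blob.name)
        if not segment.next_marker:
            break
        segment = ops.list_blob_flat_segment(marker=segment.next_marker, maxresults=100)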
The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify one or more datasets to include in the - response. + response. Default value is None. :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :return: ListBlobsHierarchySegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "container" - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_blob_hierarchy_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - query_parameters['delimiter'] = self._serialize.query("delimiter", delimiter, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - 
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_blob_hierarchy_segment_request( + url=self._config.url, + delimiter=delimiter, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) if cls: - return 
cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_blob_hierarchy_segment.metadata = {'url': '/{containerName}'} # type: ignore + return deserialized # type: ignore - def get_account_info( - self, - **kwargs # type: Any - ): - # type: (...) -> None + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, 
query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name'))
-        response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind'))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name"))
+        response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind"))
+        response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    get_account_info.metadata = {'url': '/{containerName}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_directory_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_directory_operations.py
deleted file mode 100644
index f025757048df..000000000000
--- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_directory_operations.py
+++ /dev/null
@@ -1,748 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-
-from ..
import models as _models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class DirectoryOperations(object): - """DirectoryOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def create( - self, - timeout=None, # type: Optional[int] - directory_properties=None, # type: Optional[str] - posix_permissions=None, # type: Optional[str] - posix_umask=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - directory_http_headers=None, # type: Optional["_models.DirectoryHttpHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Create a directory. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. 
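For reference, the umask rule described above amounts to clearing the umask bits from the requested permissions; a minimal sketch with illustrative values:

    # With the service-default umask of 0027 and no default ACL on the parent,
    # requesting 0777 yields 0750 (owner rwx, group r-x, others none).
    requested = 0o777
    umask = 0o027
    effective = requested & ~umask
    assert oct(effective) == '0o750'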
- :type posix_umask: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param directory_http_headers: Parameter group. - :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding = directory_http_headers.content_encoding - _content_language = directory_http_headers.content_language - _content_disposition = directory_http_headers.content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - resource = "directory" - accept = "application/xml" - - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['resource'] = self._serialize.query("resource", resource, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if directory_properties is not None: - header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_umask is not None: - header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') - if _cache_control is not None: - header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str') - if _content_type is not None: - header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str') - if _content_encoding is not None: - header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", 
_content_encoding, 'str') - if _content_language is not None: - header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') - if _content_disposition is not None: - header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - - if cls: - return cls(pipeline_response, None, response_headers) - - create.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - def rename( - self, - rename_source, # type: str - timeout=None, # type: Optional[int] - marker=None, # type: Optional[str] - path_rename_mode=None, # type: Optional[Union[str, "_models.PathRenameMode"]] - directory_properties=None, # type: Optional[str] - posix_permissions=None, # type: Optional[str] - posix_umask=None, # type: Optional[str] - source_lease_id=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - directory_http_headers=None, # type: Optional["_models.DirectoryHttpHeaders"] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) 
-> None - """Rename a directory. By default, the destination is overwritten and if the destination already - exists and has a lease the lease is broken. This operation supports conditional HTTP requests. - For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. To fail if the destination already exists, use a conditional - request with If-None-Match: "*". - - :param rename_source: The file or directory to be renamed. The value must have the following - format: "/{filesysystem}/{path}". If "x-ms-properties" is specified, the properties will - overwrite the existing properties; otherwise, the existing properties will be preserved. - :type rename_source: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param marker: When renaming a directory, the number of paths that are renamed with each - invocation is limited. If the number of paths to be renamed exceeds this limit, a continuation - token is returned in this response header. When a continuation token is returned in the - response, it must be specified in a subsequent invocation of the rename operation to continue - renaming the directory. - :type marker: str - :param path_rename_mode: Determines the behavior of the rename operation. - :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode - :param directory_properties: Optional. User-defined properties to be stored with the file or - directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", - where each value is base64 encoded. - :type directory_properties: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask - restricts permission settings for file and directory, and will only be applied when default Acl - does not exist in parent directory. If the umask bit has set, it means that the corresponding - permission will be disabled. Otherwise the corresponding permission will be determined by the - permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, - a default umask - 0027 will be used. - :type posix_umask: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. - :type source_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param directory_http_headers: Parameter group. - :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. - :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. 
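For reference, the parameter groups named above bundle the conditional headers into single model objects; a minimal sketch, assuming the vendored models module is importable as `_models` (the ETag value is illustrative):

    # Only proceed if the destination still carries this exact ETag (If-Match),
    # and fail if the source was renamed out from under us (If-None-Match: "*").
    conditions = _models.ModifiedAccessConditions(if_match='"0x8D4BCC2E4835CD0"')
    source_conditions = _models.SourceModifiedAccessConditions(source_if_none_match='*')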
- :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _cache_control = None - _content_type = None - _content_encoding = None - _content_language = None - _content_disposition = None - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - if directory_http_headers is not None: - _cache_control = directory_http_headers.cache_control - _content_type = directory_http_headers.content_type - _content_encoding = directory_http_headers.content_encoding - _content_language = directory_http_headers.content_language - _content_disposition = directory_http_headers.content_disposition - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - if source_modified_access_conditions is not None: - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_none_match = source_modified_access_conditions.source_if_none_match - accept = "application/xml" - - # Construct URL - url = self.rename.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if marker is not None: - query_parameters['continuation'] = self._serialize.query("marker", marker, 'str') - if path_rename_mode is not None: - query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str') - if directory_properties is not None: - header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_umask is not None: - header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') - if _cache_control is not None: - header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str') - if _content_type is not None: - header_parameters['x-ms-content-type'] 
= self._serialize.header("content_type", _content_type, 'str') - if _content_encoding is not None: - header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') - if _content_language is not None: - header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') - if _content_disposition is not None: - header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if source_lease_id is not None: - header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _source_if_modified_since is not None: - header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') - if _source_if_unmodified_since is not None: - header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') - if _source_if_match is not None: - header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') - if _source_if_none_match is not None: - header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - 
response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - - if cls: - return cls(pipeline_response, None, response_headers) - - rename.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - def delete( - self, - recursive_directory_delete, # type: bool - timeout=None, # type: Optional[int] - marker=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes the directory. - - :param recursive_directory_delete: If "true", all paths beneath the directory will be deleted. - If "false" and the directory is non-empty, an error occurs. - :type recursive_directory_delete: bool - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param marker: When renaming a directory, the number of paths that are renamed with each - invocation is limited. If the number of paths to be renamed exceeds this limit, a continuation - token is returned in this response header. When a continuation token is returned in the - response, it must be specified in a subsequent invocation of the rename operation to continue - renaming the directory. - :type marker: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
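For reference, a minimal sketch of the continuation loop these docstrings describe for deleting a large directory; `dir_ops` is an illustrative DirectoryOperations instance, and the `cls` hook is used here only to surface the parsed response headers:

    # Keep re-issuing the delete with the returned token until none comes back.
    marker = None
    while True:
        headers = dir_ops.delete(
            recursive_directory_delete=True,
            marker=marker,
            cls=lambda pipeline_response, deserialized, response_headers: response_headers,
        )
        marker = headers.get('x-ms-continuation')
        if not marker:
            break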
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_modified_since = None - _if_unmodified_since = None - _if_match = None - _if_none_match = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - accept = "application/xml" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - query_parameters['recursive'] = self._serialize.query("recursive_directory_delete", recursive_directory_delete, 'bool') - if marker is not None: - query_parameters['continuation'] = self._serialize.query("marker", marker, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - - if cls: - return cls(pipeline_response, None, response_headers) - - delete.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - def set_access_control( - self, - timeout=None, # type: Optional[int] - owner=None, # type: Optional[str] - group=None, # type: Optional[str] - posix_permissions=None, # type: Optional[str] - posix_acl=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Set the owner, group, permissions, or access control list for a directory. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param owner: Optional. The owner of the blob or directory. - :type owner: str - :param group: Optional. The owning group of the blob or directory. - :type group: str - :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - :type posix_permissions: str - :param posix_acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". - :type posix_acl: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
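For reference, a minimal sketch of the "[scope:][type]:[id]:[permissions]" ACE format described above; the group object ID is an illustrative placeholder:

    # Build the comma-separated access control list sent as x-ms-acl.
    owner_ace = "user::rwx"
    group_ace = "group:22222222-2222-2222-2222-222222222222:r-x"
    other_ace = "other::---"
    posix_acl = ",".join([owner_ace, group_ace, other_ace])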
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "setAccessControl" - accept = "application/xml" - - # Construct URL - url = self.set_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if owner is not None: - header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str') - if group is not None: - header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str') - if posix_permissions is not None: - header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') - if posix_acl is not None: - header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.patch(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - - if cls: - return cls(pipeline_response, None, response_headers) - - set_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore - - def get_access_control( - self, - timeout=None, # type: Optional[int] - upn=None, # type: Optional[bool] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None - """Get the owner, group, permissions, or access control list for a directory. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. - :type timeout: int - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If - "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. - :type upn: bool - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. - :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. 
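For reference, the `cls` callback mentioned throughout these docstrings is invoked with the raw pipeline response, the deserialized value, and the parsed response headers (as the `return cls(pipeline_response, None, response_headers)` lines in this code show); a minimal sketch that uses it to read the ACL back from get_access_control (`dir_ops` is illustrative):

    def capture_headers(pipeline_response, deserialized, response_headers):
        # Return the parsed headers instead of the operation's default None.
        return response_headers

    headers = dir_ops.get_access_control(upn=True, cls=capture_headers)
    acl = headers.get('x-ms-acl')
    owner = headers.get('x-ms-owner')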
- :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_none_match = modified_access_conditions.if_none_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - action = "getAccessControl" - accept = "application/xml" - - # Construct URL - url = self.get_access_control.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['action'] = self._serialize.query("action", action, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if upn is not None: - query_parameters['upn'] = self._serialize.query("upn", upn, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.head(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataLakeStorageError, response) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', 
response.headers.get('Last-Modified'))
-        response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner'))
-        response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group'))
-        response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions'))
-        response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-
-        if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    get_access_control.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_page_blob_operations.py
index e7f8a0223351..96fd5d1c503f 100644
--- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_page_blob_operations.py
+++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_page_blob_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines,too-many-statements
 # coding=utf-8
 # --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,75 +7,858 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+import sys
+from typing import Any, Callable, Dict, IO, Literal, Optional, Type, TypeVar, Union
+
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
 from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict

 from ..
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_create_request( + url: str, + *, + content_length: int, + blob_content_length: int, + timeout: Optional[int] = None, + tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, + blob_content_type: Optional[str] = None, + blob_content_encoding: Optional[str] = None, + blob_content_language: Optional[str] = None, + blob_content_md5: Optional[bytes] = None, + blob_cache_control: Optional[str] = None, + metadata: Optional[Dict[str, str]] = None, + lease_id: Optional[str] = None, + blob_content_disposition: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + blob_sequence_number: int = 0, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None, + legal_hold: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-blob-type"] = _SERIALIZER.header("blob_type", blob_type, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if tier is not None: + _headers["x-ms-access-tier"] = _SERIALIZER.header("tier", tier, "str") + if blob_content_type is not None: + _headers["x-ms-blob-content-type"] = _SERIALIZER.header("blob_content_type", blob_content_type, "str") + if blob_content_encoding is not None: + _headers["x-ms-blob-content-encoding"] = _SERIALIZER.header( + "blob_content_encoding", blob_content_encoding, "str" + ) + if blob_content_language is not None: + _headers["x-ms-blob-content-language"] = _SERIALIZER.header( + "blob_content_language", blob_content_language, "str" + ) + if blob_content_md5 is not None: + _headers["x-ms-blob-content-md5"] = _SERIALIZER.header("blob_content_md5", blob_content_md5, "bytearray") + if blob_cache_control is not None: + _headers["x-ms-blob-cache-control"] = _SERIALIZER.header("blob_cache_control", 
blob_cache_control, "str") + if metadata is not None: + _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if blob_content_disposition is not None: + _headers["x-ms-blob-content-disposition"] = _SERIALIZER.header( + "blob_content_disposition", blob_content_disposition, "str" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") + if blob_sequence_number is not None: + _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if blob_tags_string is not None: + _headers["x-ms-tags"] = _SERIALIZER.header("blob_tags_string", blob_tags_string, "str") + if immutability_policy_expiry is not None: + _headers["x-ms-immutability-policy-until-date"] = _SERIALIZER.header( + "immutability_policy_expiry", immutability_policy_expiry, "rfc-1123" + ) + if immutability_policy_mode is not None: + _headers["x-ms-immutability-policy-mode"] = _SERIALIZER.header( + "immutability_policy_mode", immutability_policy_mode, "str" + ) + if legal_hold is not None: + _headers["x-ms-legal-hold"] = _SERIALIZER.header("legal_hold", legal_hold, "bool") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_upload_pages_request( + url: str, + *, + content_length: int, + content: IO[bytes], + transactional_content_md5: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + 
if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_md5 is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_md5", transactional_content_md5, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", 
if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_clear_pages_request( + url: str, + *, + content_length: int, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = 
_SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_upload_pages_from_url_request( + url: str, + *, + source_url: str, + source_range: str, + content_length: int, + range: str, + source_content_md5: Optional[bytes] = None, + source_contentcrc64: Optional[bytes] = None, + timeout: Optional[int] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + lease_id: Optional[str] = None, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) + page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) + version: Literal["2025-01-05"] = 
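# --- Editor's aside (not part of the patch): clearing a page range reuses
# comp=page but pins "x-ms-page-write: clear" and sends no body, so
# content_length is 0. Sketch against build_clear_pages_request above,
# with a hypothetical URL:
request = build_clear_pages_request(
    url="https://myaccount.blob.core.windows.net/mycontainer/myblob",
    content_length=0,
    range="bytes=0-511",
)
assert request.method == "PUT"
assert request.headers["x-ms-page-write"] == "clear"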
kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-page-write"] = _SERIALIZER.header("page_write", page_write, "str") + _headers["x-ms-copy-source"] = _SERIALIZER.header("source_url", source_url, "str") + _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str") + if source_content_md5 is not None: + _headers["x-ms-source-content-md5"] = _SERIALIZER.header("source_content_md5", source_content_md5, "bytearray") + if source_contentcrc64 is not None: + _headers["x-ms-source-content-crc64"] = _SERIALIZER.header( + "source_contentcrc64", source_contentcrc64, "bytearray" + ) + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_sequence_number_less_than_or_equal_to is not None: + _headers["x-ms-if-sequence-number-le"] = _SERIALIZER.header( + "if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, "int" + ) + if if_sequence_number_less_than is not None: + _headers["x-ms-if-sequence-number-lt"] = _SERIALIZER.header( + "if_sequence_number_less_than", if_sequence_number_less_than, "int" + ) + if if_sequence_number_equal_to is not None: + _headers["x-ms-if-sequence-number-eq"] = _SERIALIZER.header( + "if_sequence_number_equal_to", if_sequence_number_equal_to, "int" + ) + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if 
source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if copy_source_authorization is not None: + _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( + "copy_source_authorization", copy_source_authorization, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_page_ranges_request( + url: str, + *, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + + # Construct headers + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 
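# --- Editor's aside (not part of the patch): build_upload_pages_from_url_request,
# defined above, writes pages server-side from another blob, so the request
# itself carries no body (content_length=0). source_range and range must span
# the same number of 512-aligned bytes; copy_source_authorization can carry a
# bearer token for the source. Hypothetical values throughout:
request = build_upload_pages_from_url_request(
    url="https://dst.blob.core.windows.net/container/dst-blob",
    source_url="https://src.blob.core.windows.net/container/src-blob",
    source_range="bytes=0-511",
    content_length=0,
    range="bytes=0-511",
)
assert request.headers["x-ms-copy-source"].startswith("https://src.")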
"str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_page_ranges_diff_request( + url: str, + *, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + prevsnapshot: Optional[str] = None, + prev_snapshot_url: Optional[str] = None, + range: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if snapshot is not None: + _params["snapshot"] = _SERIALIZER.query("snapshot", snapshot, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if prevsnapshot is not None: + _params["prevsnapshot"] = _SERIALIZER.query("prevsnapshot", prevsnapshot, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + + # Construct headers + if prev_snapshot_url is not None: + _headers["x-ms-previous-snapshot-url"] = _SERIALIZER.header("prev_snapshot_url", prev_snapshot_url, "str") + if range is not None: + _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resize_request( + url: str, + *, + blob_content_length: int, + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + encryption_key: 
Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + encryption_scope: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if encryption_scope is not None: + _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-blob-content-length"] = _SERIALIZER.header("blob_content_length", blob_content_length, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_sequence_number_request( + url: str, + *, + sequence_number_action: Union[str, _models.SequenceNumberActionType], + timeout: Optional[int] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + 
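# --- Editor's aside (not part of the patch): build_resize_request above maps to
# x-ms-blob-content-length, and build_update_sequence_number_request, declared
# just below, maps to x-ms-sequence-number-action. A hedged sketch via the
# upstream public client; placeholders throughout:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    conn_str="<connection-string>", container_name="c", blob_name="b"
)
blob.resize_blob(size=2 * 1024 * 1024)  # new size must stay 512-byte aligned
blob.set_sequence_number("increment")   # wire values: "max", "update", "increment"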
blob_sequence_number: int = 0, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + if if_tags is not None: + _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str") + _headers["x-ms-sequence-number-action"] = _SERIALIZER.header( + "sequence_number_action", sequence_number_action, "str" + ) + if blob_sequence_number is not None: + _headers["x-ms-blob-sequence-number"] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, "int") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_copy_incremental_request( + url: str, + *, + copy_source: str, + timeout: Optional[int] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + if 
if_modified_since is not None:
+        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
+    if if_unmodified_since is not None:
+        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
+    if if_match is not None:
+        _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
+    if if_none_match is not None:
+        _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")
+    if if_tags is not None:
+        _headers["x-ms-if-tags"] = _SERIALIZER.header("if_tags", if_tags, "str")
+    _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str")
+    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+    if request_id_parameter is not None:
+        _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class PageBlobOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
 
-if TYPE_CHECKING:
-    # pylint: disable=unused-import,ungrouped-imports
-    from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union
-
-    T = TypeVar('T')
-    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class PageBlobOperations(object):
-    """PageBlobOperations operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
-
-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~azure.storage.blob.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
+    Instead, you should access the following operations through
+    :class:`~azure.storage.blob.AzureBlobStorage`'s
+    :attr:`page_blob` attribute.
     """
 
     models = _models
 
-    def __init__(self, client, config, serializer, deserializer):
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
+    def __init__(self, *args, **kwargs):
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
 
-    def create(
+    @distributed_trace
+    def create(  # pylint: disable=inconsistent-return-statements
         self,
-        content_length,  # type: int
-        blob_content_length,  # type: int
-        timeout=None,  # type: Optional[int]
-        tier=None,  # type: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]]
-        metadata=None,  # type: Optional[str]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        blob_sequence_number=0,  # type: Optional[int]
-        request_id_parameter=None,  # type: Optional[str]
-        blob_tags_string=None,  # type: Optional[str]
-        blob_http_headers=None,  # type: Optional["_models.BlobHTTPHeaders"]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        content_length: int,
+        blob_content_length: int,
+        timeout: Optional[int] = None,
+        tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None,
+        metadata: Optional[Dict[str, str]] = None,
+        blob_sequence_number: int = 0,
+        request_id_parameter: Optional[str] = None,
+        blob_tags_string: Optional[str] = None,
+        immutability_policy_expiry: Optional[datetime.datetime] = None,
+        immutability_policy_mode: Optional[Union[str, _models.BlobImmutabilityPolicyMode]] = None,
+        legal_hold: Optional[bool] = None,
+        blob_http_headers: Optional[_models.BlobHTTPHeaders] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
         """The Create operation creates a new page blob.
 
-        :param content_length: The length of the request.
-        :type content_length: long
+        :param content_length: The length of the request. Required.
+        :type content_length: int
         :param blob_content_length: This header specifies the maximum size for the page blob, up to 1
-         TB. The page blob size must be aligned to a 512-byte boundary.
-        :type blob_content_length: long
+         TB. The page blob size must be aligned to a 512-byte boundary. Required.
+        :type blob_content_length: int
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param tier: Optional. Indicates the tier to be set on the page blob.
+        :param tier: Optional. Indicates the tier to be set on the page blob. Known values are: "P4",
+         "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", and "P80". Default value is None.
         :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier
         :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob.
         If no name-value pairs are specified, the operation will copy the metadata from the source blob
@@ -82,42 +866,55 @@ def create(
         blob is created with the specified metadata, and metadata is not copied from the source blob
         or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming
         rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more
-         information.
-        :type metadata: str
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-         the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-         provided.
-        :type encryption_algorithm: str
+         information. Default value is None.
+        :type metadata: dict[str, str]
         :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled
         value that you can use to track requests. The value of the sequence number must be between 0
-         and 2^63 - 1.
-        :type blob_sequence_number: long
+         and 2^63 - 1. Default value is 0.
+        :type blob_sequence_number: int
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param blob_tags_string: Optional. Used to set blob tags in various blob operations.
+        :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+         value is None.
         :type blob_tags_string: str
-        :param blob_http_headers: Parameter group.
+        :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy
+         is set to expire. Default value is None.
+        :type immutability_policy_expiry: ~datetime.datetime
+        :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+         Known values are: "Mutable", "Unlocked", and "Locked". Default value is None.
+        :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+        :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None.
+        :type legal_hold: bool
+        :param blob_http_headers: Parameter group. Default value is None.
         :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_info: Parameter group.
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param modified_access_conditions: Parameter group.
+        :param modified_access_conditions: Parameter group. Default value is None.
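# --- Editor's aside (not part of the patch): end-to-end, the Create operation
# documented above is what BlobClient.create_page_blob drives in the upstream
# azure-storage-blob package. A hedged sketch; placeholders throughout:
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    conn_str="<connection-string>", container_name="c", blob_name="b"
)
blob.create_page_blob(size=1024, metadata={"origin": "example"})  # size -> x-ms-blob-content-length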
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) + cls: ClsType[None] = kwargs.pop("cls", None) + _blob_content_type = None _blob_content_encoding = None _blob_content_language = None @@ -127,6 +924,7 @@ def create( _blob_content_disposition = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None @@ -134,184 +932,184 @@ def create( _if_none_match = None _if_tags = None if blob_http_headers is not None: - _blob_content_type = blob_http_headers.blob_content_type + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_disposition = blob_http_headers.blob_content_disposition _blob_content_encoding = blob_http_headers.blob_content_encoding _blob_content_language = blob_http_headers.blob_content_language _blob_content_md5 = blob_http_headers.blob_content_md5 - _blob_cache_control = blob_http_headers.blob_cache_control - _blob_content_disposition = blob_http_headers.blob_content_disposition + _blob_content_type = blob_http_headers.blob_content_type + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - blob_type = "PageBlob" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_create_request( + url=self._config.url, + content_length=content_length, + blob_content_length=blob_content_length, + timeout=timeout, + tier=tier, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + 
blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + blob_type=blob_type, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-blob-type'] = self._serialize.header("blob_type", blob_type, 'str') - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - if tier is not None: - header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str') - if _blob_content_type is not None: - header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", _blob_content_type, 'str') - if _blob_content_encoding is not None: - header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", _blob_content_encoding, 'str') - if _blob_content_language is not None: - header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", _blob_content_language, 'str') - if _blob_content_md5 is not None: - header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", _blob_content_md5, 'bytearray') - if _blob_cache_control is not None: - header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", _blob_cache_control, 'str') - if metadata is not None: - header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _blob_content_disposition is not None: - header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", _blob_content_disposition, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - 
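# --- Editor's aside (not part of the patch): the error_map assembled above now
# also maps 304 to ResourceNotModifiedError, and a per-call override is merged
# in from kwargs, so callers can re-type a status code. Hedged sketch with a
# hypothetical generated-client instance exposing the page_blob group:
from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError

try:
    client.page_blob.create(                   # hypothetical AzureBlobStorage instance
        content_length=0,
        blob_content_length=1024,
        error_map={412: ResourceExistsError},  # illustrative: surface 412 as "exists"
    )
except ResourceNotFoundError:
    pass  # 404 from the default map
except ResourceExistsError:
    pass  # 409 by default, plus the remapped 412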
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long')
-        if blob_sequence_number is not None:
-            header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        if blob_tags_string is not None:
-            header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    create.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
-
-    def upload_pages(
+    @distributed_trace
+    def upload_pages(  # pylint: disable=inconsistent-return-statements
         self,
-        content_length,  # type: int
-        body,  # type: IO
-        transactional_content_md5=None,  # type: Optional[bytearray]
-        transactional_content_crc64=None,  # type: Optional[bytearray]
-        timeout=None,  # type: Optional[int]
-        range=None,  # type: Optional[str]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        request_id_parameter=None,  # type: Optional[str]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        sequence_number_access_conditions=None,  # type: Optional["_models.SequenceNumberAccessConditions"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        content_length: int,
+        body: IO[bytes],
+        transactional_content_md5: Optional[bytes] = None,
+        transactional_content_crc64: Optional[bytes] = None,
+        timeout: Optional[int] = None,
+        range: Optional[str] = None,
+        request_id_parameter: Optional[str] = None,
+        structured_body_type: Optional[str] = None,
+        structured_content_length: Optional[int] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
        """The Upload Pages operation writes a range of pages to a page blob.

-        :param content_length: The length of the request.
-        :type content_length: long
-        :param body: Initial data.
-        :type body: IO
+        :param content_length: The length of the request. Required.
+        :type content_length: int
+        :param body: Initial data. Required.
+        :type body: IO[bytes]
         :param transactional_content_md5: Specify the transactional md5 for the body, to be validated
-         by the service.
-        :type transactional_content_md5: bytearray
+         by the service. Default value is None.
+        :type transactional_content_md5: bytes
         :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
-         validated by the service.
-        :type transactional_content_crc64: bytearray
+         validated by the service. Default value is None.
+        :type transactional_content_crc64: bytes
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param range: Return only the bytes of the blob in the specified range.
+        :param range: Return only the bytes of the blob in the specified range. Default value is None.
         :type range: str
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-         the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-         provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param structured_body_type: Required if the request body is a structured message. Specifies
+         the message schema version and properties. Default value is None.
+        :type structured_body_type: str
+        :param structured_content_length: Required if the request body is a structured message.
+         Specifies the length of the blob/file content inside the message body. Will always be smaller
+         than Content-Length. Default value is None.
+        :type structured_content_length: int
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_info: Parameter group.
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param sequence_number_access_conditions: Parameter group.
-        :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param sequence_number_access_conditions: Parameter group. Default value is None.
+        :type sequence_number_access_conditions:
+         ~azure.storage.blob.models.SequenceNumberAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
+        page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update"))
+        content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _lease_id = None
         _encryption_key = None
         _encryption_key_sha256 = None
+        _encryption_algorithm = None
         _encryption_scope = None
         _if_sequence_number_less_than_or_equal_to = None
         _if_sequence_number_less_than = None
@@ -321,169 +1119,169 @@ def upload_pages(
         _if_match = None
         _if_none_match = None
         _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
         if cpk_info is not None:
+            _encryption_algorithm = cpk_info.encryption_algorithm
             _encryption_key = cpk_info.encryption_key
             _encryption_key_sha256 = cpk_info.encryption_key_sha256
         if cpk_scope_info is not None:
             _encryption_scope = cpk_scope_info.encryption_scope
-        if lease_access_conditions is not None:
-            _lease_id = lease_access_conditions.lease_id
+        if sequence_number_access_conditions is not None:
+            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
+            _if_sequence_number_less_than_or_equal_to = (
+                sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
+            )
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        if sequence_number_access_conditions is not None:
-            _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
-            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
-            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
-        comp = "page"
-        page_write = "update"
-        content_type = kwargs.pop("content_type", "application/octet-stream")
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.upload_pages.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
-        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
-        if transactional_content_md5 is not None:
-            header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray')
-        if transactional_content_crc64 is not None:
-            header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
-        if range is not None:
-            header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _encryption_key is not None:
-            header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str')
-        if _encryption_key_sha256 is not None:
-            header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str')
-        if encryption_algorithm is not None:
-            header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _if_sequence_number_less_than_or_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long')
-        if _if_sequence_number_less_than is not None:
-            header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long')
-        if _if_sequence_number_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content_kwargs['stream_content'] = body
-        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _content = body
+
+        _request = build_upload_pages_request(
+            url=self._config.url,
+            content_length=content_length,
+            transactional_content_md5=transactional_content_md5,
+            transactional_content_crc64=transactional_content_crc64,
+            timeout=timeout,
+            range=range,
+            lease_id=_lease_id,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            encryption_scope=_encryption_scope,
+            if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to,
+            if_sequence_number_less_than=_if_sequence_number_less_than,
+            if_sequence_number_equal_to=_if_sequence_number_equal_to,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            structured_body_type=structured_body_type,
+            structured_content_length=structured_content_length,
+            comp=comp,
+            page_write=page_write,
+            content_type=content_type,
+            version=self._config.version,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )
+        response_headers["x-ms-structured-body"] = self._deserialize(
+            "str", response.headers.get("x-ms-structured-body")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    upload_pages.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
-    def clear_pages(
+    @distributed_trace
+    def clear_pages(  # pylint: disable=inconsistent-return-statements
         self,
-        content_length,  # type: int
-        timeout=None,  # type: Optional[int]
-        range=None,  # type: Optional[str]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        request_id_parameter=None,  # type: Optional[str]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        sequence_number_access_conditions=None,  # type: Optional["_models.SequenceNumberAccessConditions"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        content_length: int,
+        timeout: Optional[int] = None,
+        range: Optional[str] = None,
+        request_id_parameter: Optional[str] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
        """The Clear Pages operation clears a set of pages from a page blob.

-        :param content_length: The length of the request.
-        :type content_length: long
+        :param content_length: The length of the request. Required.
+        :type content_length: int
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param range: Return only the bytes of the blob in the specified range.
+        :param range: Return only the bytes of the blob in the specified range. Default value is None.
         :type range: str
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-         the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-         provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_info: Parameter group.
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param sequence_number_access_conditions: Parameter group.
-        :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param sequence_number_access_conditions: Parameter group. Default value is None.
+        :type sequence_number_access_conditions:
+         ~azure.storage.blob.models.SequenceNumberAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
        }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
+        page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _lease_id = None
         _encryption_key = None
         _encryption_key_sha256 = None
+        _encryption_algorithm = None
         _encryption_scope = None
         _if_sequence_number_less_than_or_equal_to = None
         _if_sequence_number_less_than = None
@@ -493,177 +1291,174 @@ def clear_pages(
         _if_match = None
         _if_none_match = None
         _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
         if cpk_info is not None:
+            _encryption_algorithm = cpk_info.encryption_algorithm
             _encryption_key = cpk_info.encryption_key
             _encryption_key_sha256 = cpk_info.encryption_key_sha256
         if cpk_scope_info is not None:
             _encryption_scope = cpk_scope_info.encryption_scope
-        if lease_access_conditions is not None:
-            _lease_id = lease_access_conditions.lease_id
+        if sequence_number_access_conditions is not None:
+            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
+            _if_sequence_number_less_than_or_equal_to = (
+                sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
+            )
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        if sequence_number_access_conditions is not None:
-            _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
-            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
-            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
-        comp = "page"
-        page_write = "clear"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.clear_pages.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
-        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
-        if range is not None:
-            header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _encryption_key is not None:
-            header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str')
-        if _encryption_key_sha256 is not None:
-            header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str')
-        if encryption_algorithm is not None:
-            header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _if_sequence_number_less_than_or_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long')
-        if _if_sequence_number_less_than is not None:
-            header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long')
-        if _if_sequence_number_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_clear_pages_request(
+            url=self._config.url,
+            content_length=content_length,
+            timeout=timeout,
+            range=range,
+            lease_id=_lease_id,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            encryption_scope=_encryption_scope,
+            if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to,
+            if_sequence_number_less_than=_if_sequence_number_less_than,
+            if_sequence_number_equal_to=_if_sequence_number_equal_to,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            comp=comp,
+            page_write=page_write,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

         if cls:
-            return cls(pipeline_response, None, response_headers)
+            return cls(pipeline_response, None, response_headers)  # type: ignore

-    clear_pages.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
-
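clear_pages carries no request body, since clearing zeroes a range server-side; hence the Content-Length header but no content parameter above. A minimal sketch via the public SDK, reusing the placeholder client from the earlier upload_pages sketch:

    # Sketch only: the Clear Pages operation via the public SDK.
    from azure.storage.blob import BlobClient
    import os

    blob = BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # assumed env var
        "my-container", "my-page-blob",                 # placeholders
    )
    blob.clear_page(offset=0, length=512)  # range must be 512-byte aligned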
-    def upload_pages_from_url(
+    @distributed_trace
+    def upload_pages_from_url(  # pylint: disable=inconsistent-return-statements
         self,
-        source_url,  # type: str
-        source_range,  # type: str
-        content_length,  # type: int
-        range,  # type: str
-        source_content_md5=None,  # type: Optional[bytearray]
-        source_contentcrc64=None,  # type: Optional[bytearray]
-        timeout=None,  # type: Optional[int]
-        encryption_algorithm="AES256",  # type: Optional[str]
-        request_id_parameter=None,  # type: Optional[str]
-        cpk_info=None,  # type: Optional["_models.CpkInfo"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        sequence_number_access_conditions=None,  # type: Optional["_models.SequenceNumberAccessConditions"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        source_modified_access_conditions=None,  # type: Optional["_models.SourceModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+        source_url: str,
+        source_range: str,
+        content_length: int,
+        range: str,
+        source_content_md5: Optional[bytes] = None,
+        source_contentcrc64: Optional[bytes] = None,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        copy_source_authorization: Optional[str] = None,
+        cpk_info: Optional[_models.CpkInfo] = None,
+        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> None:
+        # pylint: disable=line-too-long
        """The Upload Pages operation writes a range of pages to a page blob where the contents are read
        from a URL.

-        :param source_url: Specify a URL to the copy source.
+        :param source_url: Specify a URL to the copy source. Required.
         :type source_url: str
         :param source_range: Bytes of source data in the specified range. The length of this range
-         should match the ContentLength header and x-ms-range/Range destination range header.
+         should match the ContentLength header and x-ms-range/Range destination range header. Required.
         :type source_range: str
-        :param content_length: The length of the request.
-        :type content_length: long
+        :param content_length: The length of the request. Required.
+        :type content_length: int
         :param range: The range of bytes to which the source range would be written. The range should
-         be 512 aligned and range-end is required.
+         be 512 aligned and range-end is required. Required.
         :type range: str
         :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read
-         from the copy source.
-        :type source_content_md5: bytearray
+         from the copy source. Default value is None.
+        :type source_content_md5: bytes
         :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be
-         read from the copy source.
-        :type source_contentcrc64: bytearray
+         read from the copy source. Default value is None.
+        :type source_contentcrc64: bytes
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
-         the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
-         provided.
-        :type encryption_algorithm: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param cpk_info: Parameter group.
+        :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
+         OAuth access token to copy source. Default value is None.
+        :type copy_source_authorization: str
+        :param cpk_info: Parameter group. Default value is None.
         :type cpk_info: ~azure.storage.blob.models.CpkInfo
-        :param cpk_scope_info: Parameter group.
+        :param cpk_scope_info: Parameter group. Default value is None.
         :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
-        :param lease_access_conditions: Parameter group.
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param sequence_number_access_conditions: Parameter group.
-        :type sequence_number_access_conditions: ~azure.storage.blob.models.SequenceNumberAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param sequence_number_access_conditions: Parameter group. Default value is None.
+        :type sequence_number_access_conditions:
+         ~azure.storage.blob.models.SequenceNumberAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :param source_modified_access_conditions: Parameter group.
-        :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
+        :param source_modified_access_conditions: Parameter group. Default value is None.
+        :type source_modified_access_conditions:
+         ~azure.storage.blob.models.SourceModifiedAccessConditions
+        :return: None or the result of cls(response)
         :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
        }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page"))
+        page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update"))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+
         _encryption_key = None
         _encryption_key_sha256 = None
+        _encryption_algorithm = None
         _encryption_scope = None
         _lease_id = None
         _if_sequence_number_less_than_or_equal_to = None
@@ -679,165 +1474,175 @@ def upload_pages_from_url(
         _source_if_match = None
         _source_if_none_match = None
         if cpk_info is not None:
+            _encryption_algorithm = cpk_info.encryption_algorithm
             _encryption_key = cpk_info.encryption_key
             _encryption_key_sha256 = cpk_info.encryption_key_sha256
         if cpk_scope_info is not None:
             _encryption_scope = cpk_scope_info.encryption_scope
         if lease_access_conditions is not None:
             _lease_id = lease_access_conditions.lease_id
+        if sequence_number_access_conditions is not None:
+            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
+            _if_sequence_number_less_than_or_equal_to = (
+                sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
+            )
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        if sequence_number_access_conditions is not None:
-            _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
-            _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
-            _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
         if source_modified_access_conditions is not None:
-            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
-            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
             _source_if_match = source_modified_access_conditions.source_if_match
+            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
             _source_if_none_match = source_modified_access_conditions.source_if_none_match
-        comp = "page"
-        page_write = "update"
-        accept = "application/xml"
-
-        # Construct URL
-        url = self.upload_pages_from_url.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
-        header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str')
-        header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str')
-        if source_content_md5 is not None:
-            header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray')
-        if source_contentcrc64 is not None:
-            header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray')
-        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
-        header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _encryption_key is not None:
-            header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str')
-        if _encryption_key_sha256 is not None:
-            header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str')
-        if encryption_algorithm is not None:
-            header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _if_sequence_number_less_than_or_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", _if_sequence_number_less_than_or_equal_to, 'long')
-        if _if_sequence_number_less_than is not None:
-            header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", _if_sequence_number_less_than, 'long')
-        if _if_sequence_number_equal_to is not None:
-            header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", _if_sequence_number_equal_to, 'long')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        if _source_if_modified_since is not None:
-            header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
-        if _source_if_unmodified_since is not None:
-            header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
-        if _source_if_match is not None:
-            header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
-        if _source_if_none_match is not None:
-            header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.put(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
+
+        _request = build_upload_pages_from_url_request(
+            url=self._config.url,
+            source_url=source_url,
+            source_range=source_range,
+            content_length=content_length,
+            range=range,
+            source_content_md5=source_content_md5,
+            source_contentcrc64=source_contentcrc64,
+            timeout=timeout,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            encryption_scope=_encryption_scope,
+            lease_id=_lease_id,
+            if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to,
+            if_sequence_number_less_than=_if_sequence_number_less_than,
+            if_sequence_number_equal_to=_if_sequence_number_equal_to,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            source_if_modified_since=_source_if_modified_since,
+            source_if_unmodified_since=_source_if_unmodified_since,
+            source_if_match=_source_if_match,
+            source_if_none_match=_source_if_none_match,
+            request_id_parameter=request_id_parameter,
+            copy_source_authorization=copy_source_authorization,
+            comp=comp,
+            page_write=page_write,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
         response = pipeline_response.http_response

         if response.status_code not in [201]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
-        response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))
-        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+        response_headers["x-ms-content-crc64"] = self._deserialize(
+            "bytearray", response.headers.get("x-ms-content-crc64")
+        )
+        response_headers["x-ms-blob-sequence-number"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-sequence-number")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+            "bool", response.headers.get("x-ms-request-server-encrypted")
+        )
+        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-key-sha256")
+        )
+        response_headers["x-ms-encryption-scope"] = self._deserialize(
+            "str", response.headers.get("x-ms-encryption-scope")
+        )

         if cls:
-            return cls(pipeline_response, None, response_headers)
-
-    upload_pages_from_url.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+            return cls(pipeline_response, None, response_headers)  # type: ignore
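upload_pages_from_url performs the copy server-side: the service reads the source range itself, so only headers cross the wire from the client. A minimal sketch via the public SDK; the SAS-style source URL and client setup are placeholder assumptions:

    # Sketch only: copy a page range from another blob via the public SDK.
    from azure.storage.blob import BlobClient
    import os

    blob = BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # assumed env var
        "my-container", "my-page-blob",                 # placeholders
    )
    # The source must be readable by the service, e.g. via a SAS token.
    source_url = "https://account.blob.core.windows.net/src/source-blob?<sas>"  # placeholder
    blob.upload_pages_from_url(source_url, offset=0, length=512, source_offset=0)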
+    @distributed_trace
     def get_page_ranges(
         self,
-        snapshot=None,  # type: Optional[str]
-        timeout=None,  # type: Optional[int]
-        range=None,  # type: Optional[str]
-        request_id_parameter=None,  # type: Optional[str]
-        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> "_models.PageList"
+        snapshot: Optional[str] = None,
+        timeout: Optional[int] = None,
+        range: Optional[str] = None,
+        request_id_parameter: Optional[str] = None,
+        marker: Optional[str] = None,
+        maxresults: Optional[int] = None,
+        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
+        **kwargs: Any
+    ) -> _models.PageList:
+        # pylint: disable=line-too-long
        """The Get Page Ranges operation returns the list of valid page ranges for a page blob or
        snapshot of a page blob.

         :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
          specifies the blob snapshot to retrieve. For more information on working with blob snapshots,
-         see :code:`Creating a Snapshot of
-         a Blob.`.
+         see :code:`Creating
+         a Snapshot of a Blob.`. Default value is None.
         :type snapshot: str
         :param timeout: The timeout parameter is expressed in seconds. For more information, see
-         :code:`Setting Timeouts for Blob Service Operations.`.
+         :code:`Setting
+         Timeouts for Blob Service Operations.`. Default value is None.
         :type timeout: int
-        :param range: Return only the bytes of the blob in the specified range.
+        :param range: Return only the bytes of the blob in the specified range. Default value is None.
         :type range: str
         :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
-         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
         :type request_id_parameter: str
-        :param lease_access_conditions: Parameter group.
+        :param marker: A string value that identifies the portion of the list of containers to be
+         returned with the next listing operation. The operation returns the NextMarker value within the
+         response body if the listing operation did not return all containers remaining to be listed
+         with the current page. The NextMarker value can be used as the value for the marker parameter
+         in a subsequent call to request the next page of list items. The marker value is opaque to the
+         client. Default value is None.
+        :type marker: str
+        :param maxresults: Specifies the maximum number of containers to return. If the request does
+         not specify maxresults, or specifies a value greater than 5000, the server will return up to
+         5000 items. Note that if the listing operation crosses a partition boundary, then the service
+         will return a continuation token for retrieving the remainder of the results. For this reason,
+         it is possible that the service will return fewer results than specified by maxresults, or than
+         the default of 5000. Default value is None.
+        :type maxresults: int
+        :param lease_access_conditions: Parameter group. Default value is None.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param modified_access_conditions: Parameter group.
+        :param modified_access_conditions: Parameter group. Default value is None.
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: PageList, or the result of cls(response)
+        :return: PageList or the result of cls(response)
         :rtype: ~azure.storage.blob.models.PageList
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :raises ~azure.core.exceptions.HttpResponseError:
        """
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PageList"]
-        error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {  # pylint: disable=unsubscriptable-object
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
        }
-        error_map.update(kwargs.pop('error_map', {}))
-
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist"))
+        cls: ClsType[_models.PageList] = kwargs.pop("cls", None)
+
         _lease_id = None
         _if_modified_since = None
         _if_unmodified_since = None
@@ -847,133 +1652,149 @@ def get_page_ranges(
         if lease_access_conditions is not None:
             _lease_id = lease_access_conditions.lease_id
         if modified_access_conditions is not None:
-            _if_modified_since = modified_access_conditions.if_modified_since
-            _if_unmodified_since = modified_access_conditions.if_unmodified_since
             _if_match = modified_access_conditions.if_match
+            _if_modified_since = modified_access_conditions.if_modified_since
             _if_none_match = modified_access_conditions.if_none_match
             _if_tags = modified_access_conditions.if_tags
-        comp = "pagelist"
-        accept = "application/xml"
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        _request = build_get_page_ranges_request(
+            url=self._config.url,
+            snapshot=snapshot,
+            timeout=timeout,
+            range=range,
+            lease_id=_lease_id,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            marker=marker,
+            maxresults=maxresults,
+            comp=comp,
+            version=self._config.version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )

-        # Construct URL
-        url = self.get_page_ranges.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
-        if snapshot is not None:
-            query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
-        if timeout is not None:
-            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        if range is not None:
-            header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
-        if _lease_id is not None:
-            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
-        if _if_modified_since is not None:
-            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
-        if _if_unmodified_since is not None:
-            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
-        if _if_match is not None:
-            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
-        if _if_none_match is not None:
-            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
-        if _if_tags is not None:
-            header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str')
-        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
-        if request_id_parameter is not None:
-            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize(_models.StorageError, response)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
             raise HttpResponseError(response=response, model=error)

         response_headers = {}
-        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
-        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
-        response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length'))
-        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
-        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
-        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
-        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
-        deserialized = self._deserialize('PageList', pipeline_response)
+        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+        response_headers["x-ms-blob-content-length"] = self._deserialize(
+            "int", response.headers.get("x-ms-blob-content-length")
+        )
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+        deserialized = self._deserialize("PageList", pipeline_response.http_response)

         if cls:
-            return cls(pipeline_response, deserialized, response_headers)
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

-        return deserialized
-    get_page_ranges.metadata = {'url': '/{containerName}/{blob}'}  # type: ignore
+        return deserialized  # type: ignore
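The new marker/maxresults parameters above add wire-level pagination to Get Page Ranges. In recent public 12.x releases this surfaces as list_page_ranges, which pages through results for you; a minimal sketch under the same placeholder setup as the earlier sketches, which also covers the Get Page Ranges Diff operation that follows via its previous_snapshot argument:

    # Sketch only: enumerate valid page ranges via the public SDK.
    from azure.storage.blob import BlobClient
    import os

    blob = BlobClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"],  # assumed env var
        "my-container", "my-page-blob",                 # placeholders
    )
    for page_range in blob.list_page_ranges():
        print(page_range.start, page_range.end, page_range.cleared)

    # Diff variant (Get Page Ranges Diff below): only ranges changed since a
    # snapshot, passed as an opaque snapshot DateTime string.
    # for pr in blob.list_page_ranges(previous_snapshot="<snapshot>"): ...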
snapshot=None, # type: Optional[str] - timeout=None, # type: Optional[int] - prevsnapshot=None, # type: Optional[str] - prev_snapshot_url=None, # type: Optional[str] - range=None, # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> "_models.PageList" + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + prevsnapshot: Optional[str] = None, + prev_snapshot_url: Optional[str] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> _models.PageList: + # pylint: disable=line-too-long """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were changed between target blob and previous snapshot. :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, - see :code:`Creating a Snapshot of - a Blob.`. + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. :type snapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a DateTime value that specifies that the response will contain only pages that were changed between target blob and previous snapshot. Changed pages include both updated and cleared pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is the older of the two. Note that incremental snapshots are currently supported only for blobs - created on or after January 1, 2016. + created on or after January 1, 2016. Default value is None. :type prevsnapshot: str :param prev_snapshot_url: Optional. This header is only supported in service versions 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The response will only contain pages that were changed between the target blob and its previous - snapshot. + snapshot. Default value is None. :type prev_snapshot_url: str - :param range: Return only the bytes of the blob in the specified range. + :param range: Return only the bytes of the blob in the specified range. Default value is None. :type range: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param marker: A string value that identifies the portion of the list of page ranges to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all page ranges remaining to be listed + with the current page. 
The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of page ranges to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PageList, or the result of cls(response) + :return: PageList or the result of cls(response) :rtype: ~azure.storage.blob.models.PageList - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) + cls: ClsType[_models.PageList] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -983,259 +1804,249 @@ def get_page_ranges_diff( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "pagelist" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_get_page_ranges_diff_request( + url=self._config.url, + snapshot=snapshot, + timeout=timeout, + prevsnapshot=prevsnapshot, + prev_snapshot_url=prev_snapshot_url, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = 
self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.get_page_ranges_diff.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if snapshot is not None: - query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if prevsnapshot is not None: - query_parameters['prevsnapshot'] = self._serialize.query("prevsnapshot", prevsnapshot, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if prev_snapshot_url is not None: - header_parameters['x-ms-previous-snapshot-url'] = self._serialize.header("prev_snapshot_url", prev_snapshot_url, 'str') - if range is not None: - header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str') - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('PageList', pipeline_response) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-blob-content-length"] = self._deserialize( + "int", response.headers.get("x-ms-blob-content-length") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("PageList", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_page_ranges_diff.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return deserialized # type: ignore - def resize( + @distributed_trace + def resize( # pylint: disable=inconsistent-return-statements self, - blob_content_length, # type: int - timeout=None, # type: Optional[int] - encryption_algorithm="AES256", # type: Optional[str] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - cpk_info=None, # type: Optional["_models.CpkInfo"] - cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + blob_content_length: int, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + cpk_info: Optional[_models.CpkInfo] = None, + cpk_scope_info: Optional[_models.CpkScopeInfo] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Resize the Blob. :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 - TB. The page blob size must be aligned to a 512-byte boundary. - :type blob_content_length: long + TB. The page blob size must be aligned to a 512-byte boundary. Required. + :type blob_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int - :param encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. - :type encryption_algorithm: str :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param cpk_info: Parameter group. + :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo - :param cpk_scope_info: Parameter group. + :param cpk_scope_info: Parameter group. Default value is None. :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _encryption_key = None _encryption_key_sha256 = None + _encryption_algorithm = None _encryption_scope = None _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id if cpk_info is not None: + _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key _encryption_key_sha256 = cpk_info.encryption_key_sha256 if cpk_scope_info is not None: _encryption_scope = cpk_scope_info.encryption_scope - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_resize_request( + url=self._config.url, + blob_content_length=blob_content_length, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + 
version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.resize.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _encryption_key is not None: - header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", _encryption_key, 'str') - if _encryption_key_sha256 is not None: - header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", _encryption_key_sha256, 'str') - if encryption_algorithm is not None: - header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'str') - if _encryption_scope is not None: - header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - 
response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore - resize.metadata = {'url': '/{containerName}/{blob}'} # type: ignore - - def update_sequence_number( + @distributed_trace + def update_sequence_number( # pylint: disable=inconsistent-return-statements self, - sequence_number_action, # type: Union[str, "_models.SequenceNumberActionType"] - timeout=None, # type: Optional[int] - blob_sequence_number=0, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + sequence_number_action: Union[str, _models.SequenceNumberActionType], + timeout: Optional[int] = None, + blob_sequence_number: int = 0, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Update the sequence number of the blob. :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the request. This property applies to page blobs only. This property indicates how the service - should modify the blob's sequence number. + should modify the blob's sequence number. Known values are: "max", "update", and "increment". + Required. :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 - and 2^63 - 1. - :type blob_sequence_number: long + and 2^63 - 1. Default value is 0. 
+ :type blob_sequence_number: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param lease_access_conditions: Parameter group. + :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + _lease_id = None _if_modified_since = None _if_unmodified_since = None @@ -1245,81 +2056,69 @@ def update_sequence_number( if lease_access_conditions is not None: _lease_id = lease_access_conditions.lease_id if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "properties" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_update_sequence_number_request( + url=self._config.url, + sequence_number_action=sequence_number_action, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.update_sequence_number.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _lease_id is not None: - header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-sequence-number-action'] = self._serialize.header("sequence_number_action", sequence_number_action, 'str') - if blob_sequence_number is not None: - header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-blob-sequence-number"] = self._deserialize( + "int", response.headers.get("x-ms-blob-sequence-number") + ) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + 
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) - - update_sequence_number.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore - def copy_incremental( + @distributed_trace + def copy_incremental( # pylint: disable=inconsistent-return-statements self, - copy_source, # type: str - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] - **kwargs # type: Any - ): - # type: (...) -> None + copy_source: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between the previously copied snapshot are transferred to the destination. The copied snapshots are complete copies of @@ -1329,93 +2128,89 @@ def copy_incremental( :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would appear in a request URI. The source blob must either be public or must be authenticated - via a shared access signature. + via a shared access signature. Required. :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :param modified_access_conditions: Parameter group. + :param modified_access_conditions: Parameter group. Default value is None. 
:type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) + cls: ClsType[None] = kwargs.pop("cls", None) + _if_modified_since = None _if_unmodified_since = None _if_match = None _if_none_match = None _if_tags = None if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since _if_match = modified_access_conditions.if_match + _if_modified_since = modified_access_conditions.if_modified_since _if_none_match = modified_access_conditions.if_none_match _if_tags = modified_access_conditions.if_tags - comp = "incrementalcopy" - accept = "application/xml" + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + _request = build_copy_incremental_request( + url=self._config.url, + copy_source=copy_source, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct URL - url = self.copy_incremental.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if _if_modified_since is not None: - header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') - if _if_unmodified_since is not None: - header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') - if _if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') - if _if_none_match is not None: - header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", _if_none_match, 'str') - if _if_tags is not None: - header_parameters['x-ms-if-tags'] = self._serialize.header("if_tags", _if_tags, 'str') - header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.put(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) - response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) - response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id")) + response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status")) if cls: - return cls(pipeline_response, None, response_headers) - - copy_incremental.metadata = {'url': '/{containerName}/{blob}'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_patch.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_patch.py new file mode 100644 index 000000000000..71dde502c70f --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_patch.py @@ 
-0,0 +1,26 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + + +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + + from typing import List +__all__ = [] # type: List[str] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_service_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_service_operations.py index 72f7a73fdf50..85a930712ca5 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_service_operations.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_generated/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,699 +6,1056 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Iterator, List, Literal, Optional, Type, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_set_properties_request( + url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_properties_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 
"str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_statistics_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_containers_segment_request( + url: str, + *, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_user_delegation_key_request( + url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_get_account_info_request( + url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["restype"] = _SERIALIZER.query("restype", restype, "str") + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_submit_batch_request( + url: str, + *, + content_length: int, + content: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: 
Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + + # Construct headers + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if multipart_content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("multipart_content_type", multipart_content_type, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_filter_blobs_request( + url: str, + *, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["comp"] = _SERIALIZER.query("comp", comp, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + if where is not None: + _params["where"] = _SERIALIZER.query("where", where, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if maxresults is not None: + _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1) + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if request_id_parameter is not None: + _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + 
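The request builders above all share one shape: caller-supplied `headers`/`params` are merged into case-insensitive mappings, the fixed `restype`/`comp` query literals are resolved (and remain overridable through those same kwargs), and an `azure.core.rest.HttpRequest` is returned for the operation method to execute through the pipeline. The following standalone sketch illustrates that shape using only public azure-core pieces; `build_example_request` and the account URL are hypothetical names for illustration only, and the real builders additionally validate and serialize each value through the generated `Serializer` before placing it in `_params`/`_headers`:

    from typing import Any, Optional

    from azure.core.rest import HttpRequest
    from azure.core.utils import case_insensitive_dict


    def build_example_request(url: str, *, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest:
        # Merge caller-supplied headers/params case-insensitively, as the vendored builders do.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        # Fixed query literals, overridable either as kwargs or through the params mapping.
        _params["restype"] = kwargs.pop("restype", _params.pop("restype", "service"))
        _params["comp"] = kwargs.pop("comp", _params.pop("comp", "properties"))
        if timeout is not None:
            _params["timeout"] = str(timeout)  # the generated code also range-checks this (minimum=0)

        _headers["x-ms-version"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05"))
        _headers["Accept"] = "application/xml"

        return HttpRequest(method="GET", url=url, params=dict(_params), headers=dict(_headers))


    request = build_example_request("https://myaccount.blob.core.windows.net", timeout=30)
    print(request.method, request.url)

In the operation methods, the returned request is then finished with `self._client.format_url(_request.url)` and run via `self._client._pipeline.run(_request, stream=_stream, **kwargs)`, which is what replaces the older inline `query_parameters`/`header_parameters` construction throughout this file.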
+ +class ServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ServiceOperations(object): - """ServiceOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.storage.blob.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`service` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def set_properties( + @distributed_trace + def set_properties( # pylint: disable=inconsistent-return-statements self, - storage_service_properties, # type: "_models.StorageServiceProperties" - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None + storage_service_properties: _models.StorageServiceProperties, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Sets properties for a storage account's Blob service endpoint, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - :param storage_service_properties: The StorageService properties. + :param storage_service_properties: The StorageService properties. Required. :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.set_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True) + + _request = build_set_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) 
+ response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) - - set_properties.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def get_properties( - self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.StorageServiceProperties" + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceProperties: + # pylint: disable=line-too-long """gets the properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceProperties, or the result of cls(response) + :return: StorageServiceProperties or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceProperties - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_properties.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None) + + _request = build_get_properties_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_properties.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def get_statistics( - self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.StorageServiceStats" + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> _models.StorageServiceStats: + # pylint: disable=line-too-long """Retrieves statistics related to replication for the Blob service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the storage account. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: StorageServiceStats, or the result of cls(response) + :return: StorageServiceStats or the result of cls(response) :rtype: ~azure.storage.blob.models.StorageServiceStats - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "stats" - accept = "application/xml" - - # Construct URL - url = self.get_statistics.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) + cls: ClsType[_models.StorageServiceStats] = kwargs.pop("cls", None) + + _request = build_get_statistics_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - 
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('StorageServiceStats', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("StorageServiceStats", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_statistics.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def list_containers_segment( self, - prefix=None, # type: Optional[str] - marker=None, # type: Optional[str] - maxresults=None, # type: Optional[int] - include=None, # type: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.ListContainersSegmentResponse" + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.ListContainersIncludeType]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> _models.ListContainersSegmentResponse: + # pylint: disable=line-too-long """The List Containers Segment operation returns a list of the containers under the specified account. :param prefix: Filters the results to return only containers whose name begins with the - specified prefix. + specified prefix. Default value is None. :type prefix: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int :param include: Include this parameter to specify that the container's metadata be returned as - part of the response body. + part of the response body. Default value is None. 
:type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListContainersSegmentResponse, or the result of cls(response) + :return: ListContainersSegmentResponse or the result of cls(response) :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - comp = "list" - accept = "application/xml" - - # Construct URL - url = self.list_containers_segment.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if prefix is not None: - query_parameters['prefix'] = self._serialize.query("prefix", prefix, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - if include is not None: - query_parameters['include'] = self._serialize.query("include", include, '[str]', div=',') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) + cls: ClsType[_models.ListContainersSegmentResponse] = kwargs.pop("cls", None) + + _request = build_list_containers_segment_request( + 
url=self._config.url, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = self._deserialize("ListContainersSegmentResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - list_containers_segment.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def get_user_delegation_key( self, - key_info, # type: "_models.KeyInfo" - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "_models.UserDelegationKey" + key_info: _models.KeyInfo, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> _models.UserDelegationKey: + # pylint: disable=line-too-long """Retrieves a user delegation key for the Blob service. This is only a valid operation when using bearer token authentication. - :param key_info: + :param key_info: Key information. Required. :type key_info: ~azure.storage.blob.models.KeyInfo :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: UserDelegationKey, or the result of cls(response) + :return: UserDelegationKey or the result of cls(response) :rtype: ~azure.storage.blob.models.UserDelegationKey - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "service" - comp = "userdelegationkey" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.get_user_delegation_key.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) + comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) + cls: ClsType[_models.UserDelegationKey] = kwargs.pop("cls", None) + + _content = self._serialize.body(key_info, "KeyInfo", is_xml=True) + + _request = build_get_user_delegation_key_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('UserDelegationKey', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("UserDelegationKey", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - get_user_delegation_key.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore - def get_account_info( - self, - **kwargs # type: Any - ): - # type: (...) -> None + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + ) -> None: + # pylint: disable=line-too-long """Returns the sku name and account kind. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - restype = "account" - comp = "properties" - accept = "application/xml" - - # Construct URL - url = self.get_account_info.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) + comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_get_account_info_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + restype=restype, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['restype'] = self._serialize.query("restype", restype, 'str') - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) - response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) - response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', 
response.headers.get('x-ms-is-hns-enabled')) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-sku-name"] = self._deserialize("str", response.headers.get("x-ms-sku-name")) + response_headers["x-ms-account-kind"] = self._deserialize("str", response.headers.get("x-ms-account-kind")) + response_headers["x-ms-is-hns-enabled"] = self._deserialize("bool", response.headers.get("x-ms-is-hns-enabled")) if cls: - return cls(pipeline_response, None, response_headers) - - get_account_info.metadata = {'url': '/'} # type: ignore + return cls(pipeline_response, None, response_headers) # type: ignore + @distributed_trace def submit_batch( self, - content_length, # type: int - multipart_content_type, # type: str - body, # type: IO - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> IO + content_length: int, + body: IO[bytes], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> Iterator[bytes]: + # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. - :param content_length: The length of the request. - :type content_length: long - :param multipart_content_type: Required. The value of this header must be multipart/mixed with - a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. - :type multipart_content_type: str - :param body: Initial data. - :type body: IO + :param content_length: The length of the request. Required. + :type content_length: int + :param body: Initial data. Required. + :type body: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
:type request_id_parameter: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IO, or the result of cls(response) - :rtype: IO - :raises: ~azure.core.exceptions.HttpResponseError + :return: Iterator[bytes] or the result of cls(response) + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[IO] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - comp = "batch" - content_type = kwargs.pop("content_type", "application/xml") - accept = "application/xml" - - # Construct URL - url = self.submit_batch.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long') - header_parameters['Content-Type'] = self._serialize.header("multipart_content_type", multipart_content_type, 'str') - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(body, 'IO', is_xml=True) - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) + multipart_content_type: str = kwargs.pop( + "multipart_content_type", _headers.pop("Content-Type", "application/xml") + ) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _content = body + + _request = build_submit_batch_request( + url=self._config.url, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + comp=comp, + multipart_content_type=multipart_content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + 
_request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - deserialized = response.stream_download(self._client._pipeline) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - submit_batch.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore + @distributed_trace def filter_blobs( self, - timeout=None, # type: Optional[int] - request_id_parameter=None, # type: Optional[str] - where=None, # type: Optional[str] - marker=None, # type: Optional[str] - maxresults=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> "_models.FilterBlobSegment" + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, + **kwargs: Any + ) -> _models.FilterBlobSegment: + # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be scoped within the expression to a single container. :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting Timeouts for Blob Service Operations.`. + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. :type request_id_parameter: str :param where: Filters the results to return only to return only blobs whose tags match the - specified expression. + specified expression. Default value is None. :type where: str :param marker: A string value that identifies the portion of the list of containers to be returned with the next listing operation. 
The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the - client. + client. Default value is None. :type marker: str :param maxresults: Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than - the default of 5000. + the default of 5000. Default value is None. :type maxresults: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: FilterBlobSegment, or the result of cls(response) + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :type include: list[str or ~azure.storage.blob.models.FilterBlobsIncludeItem] + :return: FilterBlobSegment or the result of cls(response) :rtype: ~azure.storage.blob.models.FilterBlobSegment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - comp = "blobs" - accept = "application/xml" - - # Construct URL - url = self.filter_blobs.metadata['url'] # type: ignore - path_format_arguments = { - 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + error_map: MutableMapping[int, Type[HttpResponseError]] = { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['comp'] = self._serialize.query("comp", comp, 'str') - if timeout is not None: - query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) - if where is not None: - query_parameters['where'] = self._serialize.query("where", where, 'str') - if marker is not None: - query_parameters['marker'] = self._serialize.query("marker", marker, 'str') - if maxresults is not None: - query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1) - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') - if request_id_parameter is not None: - header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = 
kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) + cls: ClsType[_models.FilterBlobSegment] = kwargs.pop("cls", None) + + _request = build_filter_blobs_request( + url=self._config.url, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + include=include, + comp=comp, + version=self._config.version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.StorageError, response) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) - response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) - response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) - response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) - deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + + deserialized = self._deserialize("FilterBlobSegment", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - filter_blobs.metadata = {'url': '/'} # type: ignore + return deserialized # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_lease.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_lease.py index d495d6e2dfb9..b8b5684d7c23 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_lease.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_lease.py @@ -3,60 +3,55 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only import uuid -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, TypeVar, TYPE_CHECKING -) +from typing import Any, Optional, Union, TYPE_CHECKING from azure.core.exceptions import HttpResponseError from azure.core.tracing.decorator import distributed_trace -from ._shared.response_handlers import return_response_headers, process_storage_error +from ._shared.response_handlers import process_storage_error, return_response_headers from ._serialize import get_modify_conditions if TYPE_CHECKING: + from azure.storage.blob import BlobClient, ContainerClient from datetime import datetime - BlobClient = TypeVar("BlobClient") - ContainerClient = TypeVar("ContainerClient") - -class BlobLeaseClient(object): +class BlobLeaseClient(): # pylint: disable=client-accepts-api-version-keyword """Creates a new BlobLeaseClient. This client provides lease operations on a BlobClient or ContainerClient. - - :ivar str id: - The ID of the lease currently being maintained. This will be `None` if no - lease has yet been acquired. - :ivar str etag: - The ETag of the lease currently being maintained. This will be `None` if no - lease has yet been acquired or modified. - :ivar ~datetime.datetime last_modified: - The last modified timestamp of the lease currently being maintained. - This will be `None` if no lease has yet been acquired or modified. - - :param client: - The client of the blob or container to lease. - :type client: ~azure.storage.blob.BlobClient or - ~azure.storage.blob.ContainerClient - :param str lease_id: - A string representing the lease ID of an existing lease. This value does not - need to be specified in order to acquire a new lease, or break one. + :param client: The client of the blob or container to lease. + :type client: Union[BlobClient, ContainerClient] + :param lease_id: A string representing the lease ID of an existing lease. This value does not need to be + specified in order to acquire a new lease, or break one. + :type lease_id: Optional[str] """ - def __init__( - self, client, lease_id=None - ): # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs - # type: (Union[BlobClient, ContainerClient], Optional[str]) -> None + + id: str + """The ID of the lease currently being maintained. This will be `None` if no + lease has yet been acquired.""" + etag: Optional[str] + """The ETag of the lease currently being maintained. This will be `None` if no + lease has yet been acquired or modified.""" + last_modified: Optional["datetime"] + """The last modified timestamp of the lease currently being maintained. 
+ This will be `None` if no lease has yet been acquired or modified.""" + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs + self, client: Union["BlobClient", "ContainerClient"], + lease_id: Optional[str] = None + ) -> None: self.id = lease_id or str(uuid.uuid4()) self.last_modified = None self.etag = None if hasattr(client, 'blob_name'): - self._client = client._client.blob # type: ignore # pylint: disable=protected-access + self._client = client._client.blob elif hasattr(client, 'container_name'): - self._client = client._client.container # type: ignore # pylint: disable=protected-access + self._client = client._client.container else: raise TypeError("Lease must use either BlobClient or ContainerClient.") @@ -67,8 +62,7 @@ def __exit__(self, *args): self.release() @distributed_trace - def acquire(self, lease_duration=-1, **kwargs): - # type: (int, **Any) -> None + def acquire(self, lease_duration: int = -1, **kwargs: Any) -> None: """Requests a new lease. If the container does not have an active lease, the Blob service creates a @@ -103,12 +97,16 @@ def acquire(self, lease_duration=-1, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.acquire_lease( + response: Any = self._client.acquire_lease( timeout=kwargs.pop('timeout', None), duration=lease_duration, proposed_lease_id=self.id, @@ -117,13 +115,12 @@ def acquire(self, lease_duration=-1, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime - self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') + self.etag = response.get('etag') @distributed_trace - def renew(self, **kwargs): - # type: (Any) -> None + def renew(self, **kwargs: Any) -> None: """Renews the lease. The lease can be renewed if the lease ID specified in the @@ -156,12 +153,16 @@ def renew(self, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
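For orientation, here is how the annotated `acquire`/`renew` above are typically driven from the public package, whose `BlobLeaseClient` mirrors this vendored copy; the connection string, container, and blob names are placeholders:

```python
# Hedged usage sketch for BlobLeaseClient (placeholder endpoint values;
# requires azure-storage-blob and a reachable storage account).
from azure.storage.blob import BlobClient, BlobLeaseClient

blob_client = BlobClient.from_connection_string(
    "<connection-string>", container_name="checkpoints", blob_name="ownership")

lease = BlobLeaseClient(blob_client)            # a lease ID is generated up front
lease.acquire(lease_duration=15, timeout=30)    # 15-60 seconds, or -1 for infinite
try:
    # ... work that must hold the lease ...
    lease.renew()                               # extend before the duration lapses
finally:
    lease.release()
```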
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.renew_lease( + response: Any = self._client.renew_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -169,13 +170,12 @@ def renew(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace - def release(self, **kwargs): - # type: (Any) -> None + def release(self, **kwargs: Any) -> None: """Release the lease. The lease may be released if the client lease id specified matches @@ -206,12 +206,16 @@ def release(self, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.release_lease( + response: Any = self._client.release_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -219,13 +223,12 @@ def release(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace - def change(self, proposed_lease_id, **kwargs): - # type: (str, Any) -> None + def change(self, proposed_lease_id: str, **kwargs: Any) -> None: """Change the lease ID of an active lease. :param str proposed_lease_id: @@ -255,12 +258,16 @@ def change(self, proposed_lease_id, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = self._client.change_lease( + response: Any = self._client.change_lease( lease_id=self.id, proposed_lease_id=proposed_lease_id, timeout=kwargs.pop('timeout', None), @@ -269,13 +276,12 @@ def change(self, proposed_lease_id, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace - def break_lease(self, lease_break_period=None, **kwargs): - # type: (Optional[int], Any) -> int + def break_lease(self, lease_break_period: Optional[int] = None, **kwargs: Any) -> int: """Break the lease, if the container or blob has an active lease. Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; @@ -314,7 +320,11 @@ def break_lease(self, lease_break_period=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: Approximate time remaining in the lease period, in seconds. :rtype: int """ diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_list_blobs_helper.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_list_blobs_helper.py index 309d37bd9583..5e357cea4fb4 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_list_blobs_helper.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_list_blobs_helper.py @@ -1,57 +1,74 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information.
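The `break_lease` signature at the end of the `_lease.py` hunk above is the one lease method that returns a value. A short sketch, reusing the `lease` object from the previous example:

```python
# break_lease returns the approximate seconds until the broken lease frees up.
remaining = lease.break_lease(lease_break_period=10)
print(f"lease becomes available in ~{remaining} seconds")
```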
# -------------------------------------------------------------------------- -from azure.core.paging import PageIterator, ItemPaged +from typing import Any, Callable, cast, List, Optional, Tuple, Union +from urllib.parse import unquote + from azure.core.exceptions import HttpResponseError -from ._deserialize import get_blob_properties_from_generated_code, parse_tags +from azure.core.paging import ItemPaged, PageIterator + +from ._deserialize import ( + get_blob_properties_from_generated_code, + load_many_xml_nodes, + load_xml_int, + load_xml_string, + parse_tags +) from ._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, FilterBlobItem +from ._generated._serialization import Deserializer from ._models import BlobProperties, FilteredBlob from ._shared.models import DictMixin -from ._shared.response_handlers import return_context_and_deserialized, process_storage_error +from ._shared.response_handlers import ( + process_storage_error, + return_context_and_deserialized, + return_raw_deserialized +) + + +class IgnoreListBlobsDeserializer(Deserializer): + def __call__(self, target_obj, response_data, content_type=None): # pylint: disable=inconsistent-return-statements + if target_obj == "ListBlobsFlatSegmentResponse": + return None + super().__call__(target_obj, response_data, content_type) class BlobPropertiesPaged(PageIterator): - """An Iterable of Blob properties. + """An Iterable of Blob properties.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - - :param callable command: Function to retrieve the next page of items. - :param str container: The name of the container. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str continuation_token: An opaque continuation token. 
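The paged classes in this file all follow the same two-callback contract from `azure.core.paging.PageIterator`: `get_next` fetches a raw page for a continuation token, and `extract_data` returns `(next_token_or_None, items)`. A self-contained sketch of that contract with a fake in-memory source (the real callbacks wrap generated service operations):

```python
# Minimal PageIterator subclass: three pages of three integers each.
from azure.core.paging import PageIterator

class NumbersPaged(PageIterator):
    def __init__(self, continuation_token=None):
        super().__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or "")

    def _get_next_cb(self, continuation_token):
        start = int(continuation_token or 0)
        next_token = str(start + 3) if start < 6 else None
        return {"items": list(range(start, start + 3)), "next": next_token}

    def _extract_data_cb(self, response):
        # Contract: return (continuation_token_or_None, iterable_of_items).
        return response["next"], response["items"]

for page in NumbersPaged():
    print(list(page))  # [0, 1, 2] then [3, 4, 5] then [6, 7, 8]
```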
- :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ def __init__( - self, command, - container=None, - prefix=None, - results_per_page=None, - continuation_token=None, - delimiter=None, - location_mode=None): + self, command: Callable, + container: str, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + delimiter: Optional[str] = None, + location_mode: Optional[str] = None, + ) -> None: super(BlobPropertiesPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -79,7 +96,7 @@ def _get_next_cb(self, continuation_token): process_storage_error(error) def _extract_data_cb(self, get_next_return): - self.location_mode, self._response = get_next_return + self.location_mode, self._response = cast(Tuple[Optional[str], Any], get_next_return) self.service_endpoint = self._response.service_endpoint self.prefix = self._response.prefix self.marker = self._response.marker @@ -89,16 +106,89 @@ def _extract_data_cb(self, get_next_return): return self._response.next_marker or None, self.current_page - def _build_item(self, item): + def _build_item(self, item: Union[BlobItemInternal, BlobProperties]) -> BlobProperties: if isinstance(item, BlobProperties): return item if isinstance(item, BlobItemInternal): - blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access - blob.container = self.container + blob = get_blob_properties_from_generated_code(item) + blob.container = self.container # type: ignore [assignment] return blob return item +class BlobNamesPaged(PageIterator): + """An Iterable of Blob names.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of blobs to retrieve per call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. 
The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + + def __init__( + self, command: Callable, + container: Optional[str] = None, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: + super(BlobNamesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.container = container + self.current_page = None + self.location_mode = location_mode + + def _get_next_cb(self, continuation_token): + try: + return self._command( + prefix=self.prefix, + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_raw_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.get('ServiceEndpoint') + self.prefix = load_xml_string(self._response, 'Prefix') + self.marker = load_xml_string(self._response, 'Marker') + self.results_per_page = load_xml_int(self._response, 'MaxResults') + self.container = self._response.get('ContainerName') + + blobs = load_many_xml_nodes(self._response, 'Blob', wrapper='Blobs') + self.current_page = [load_xml_string(blob, 'Name') for blob in blobs] + + next_marker = load_xml_string(self._response, 'NextMarker') + return next_marker or None, self.current_page + + class BlobPrefixPaged(BlobPropertiesPaged): def __init__(self, *args, **kwargs): super(BlobPrefixPaged, self).__init__(*args, **kwargs) @@ -115,10 +205,14 @@ def _extract_data_cb(self, get_next_return): def _build_item(self, item): item = super(BlobPrefixPaged, self)._build_item(item) if isinstance(item, GenBlobPrefix): + if item.name.encoded: + name = unquote(item.name.content) + else: + name = item.name.content return BlobPrefix( self._command, container=self.container, - prefix=item.name, + prefix=name, results_per_page=self.results_per_page, location_mode=self.location_mode) return item @@ -128,74 +222,72 @@ class BlobPrefix(ItemPaged, DictMixin): """An Iterable of Blob properties. Returned from walk_blobs when a delimiter is used. - Can be thought of as a virtual blob directory. - - :ivar str name: The prefix, or "directory name" of the blob. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str next_marker: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. 
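One behavioral fix in `BlobPrefixPaged._build_item` above: prefix names are now percent-decoded only when the service marks them as encoded, before being reused as listing prefixes. The decoding step in isolation:

```python
# Percent-encoded prefix names are decoded only when flagged as encoded.
from urllib.parse import unquote

content, encoded = "logs%2F2024%2F", True
name = unquote(content) if encoded else content
assert name == "logs/2024/"
```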
- :vartype current_page: list(~azure.storage.blob.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str marker: An opaque continuation token. - :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ - def __init__(self, *args, **kwargs): + Can be thought of as a virtual blob directory.""" + + name: str + """The prefix, or "directory name" of the blob.""" + service_endpoint: Optional[str] + """The service URL.""" + prefix: str + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + next_marker: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: str + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + delimiter: str + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + container: str + """The name of the container.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) - self.name = kwargs.get('prefix') - self.prefix = kwargs.get('prefix') + self.name = kwargs.get('prefix') # type: ignore [assignment] + self.prefix = kwargs.get('prefix') # type: ignore [assignment] self.results_per_page = kwargs.get('results_per_page') - self.container = kwargs.get('container') - self.delimiter = kwargs.get('delimiter') - self.location_mode = kwargs.get('location_mode') + self.container = kwargs.get('container') # type: ignore [assignment] + self.delimiter = kwargs.get('delimiter') # type: ignore [assignment] + self.location_mode = kwargs.get('location_mode') # type: ignore [assignment] class FilteredBlobPaged(PageIterator): - """An Iterable of Blob properties. + """An Iterable of Blob properties.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. 
The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + command: Callable + """Function to retrieve the next page of items.""" + container: Optional[str] + """The name of the container.""" - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.FilteredBlob) - :ivar str container: The container that the blobs are listed from. - - :param callable command: Function to retrieve the next page of items. - :param str container: The name of the container. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str continuation_token: An opaque continuation token. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ def __init__( - self, command, - container=None, - results_per_page=None, - continuation_token=None, - location_mode=None): + self, command: Callable, + container: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: super(FilteredBlobPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_models.py index 1a8237cfea14..fbde3a808e1e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_models.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_models.py @@ -7,49 +7,91 @@ # pylint: disable=super-init-not-called, too-many-lines from enum import Enum +from typing import Any, Callable, Dict, List, Optional, Union, TYPE_CHECKING +from azure.core import CaseInsensitiveEnumMeta from azure.core.paging import PageIterator from azure.core.exceptions import HttpResponseError -from ._generated.models import ArrowField -from ._shared import decode_base64_to_text +from ._shared import decode_base64_to_bytes from ._shared.response_handlers import return_context_and_deserialized, process_storage_error from ._shared.models import DictMixin, get_enum_value +from ._generated.models import AccessPolicy as GenAccessPolicy +from ._generated.models import ArrowField +from ._generated.models import CorsRule as GeneratedCorsRule from ._generated.models import Logging as GeneratedLogging from ._generated.models import Metrics as GeneratedMetrics from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy from ._generated.models import StaticWebsite as GeneratedStaticWebsite -from ._generated.models import CorsRule as GeneratedCorsRule 
-from ._generated.models import AccessPolicy as GenAccessPolicy +if TYPE_CHECKING: + from datetime import datetime + from ._generated.models import PageList -class BlobType(str, Enum): +# Parse a generated PageList into a single list of PageRange sorted by start. +def parse_page_list(page_list: "PageList") -> List["PageRange"]: - BlockBlob = "BlockBlob" - PageBlob = "PageBlob" - AppendBlob = "AppendBlob" + page_ranges = page_list.page_range + clear_ranges = page_list.clear_range + if page_ranges is None: + raise ValueError("PageList's 'page_range' is malformed or None.") + if clear_ranges is None: + raise ValueError("PageList's 'clear_ranges' is malformed or None.") -class BlockState(str, Enum): + ranges = [] + p_i, c_i = 0, 0 + + # Combine page ranges and clear ranges into single list, sorted by start + while p_i < len(page_ranges) and c_i < len(clear_ranges): + p, c = page_ranges[p_i], clear_ranges[c_i] + + if p.start < c.start: + ranges.append( + PageRange(start=p.start, end=p.end, cleared=False) + ) + p_i += 1 + else: + ranges.append( + PageRange(start=c.start, end=c.end, cleared=True) + ) + c_i += 1 + + # Grab remaining elements in either list + ranges += [PageRange(start=r.start, end=r.end, cleared=False) for r in page_ranges[p_i:]] + ranges += [PageRange(start=r.start, end=r.end, cleared=True) for r in clear_ranges[c_i:]] + + return ranges + + +class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + BLOCKBLOB = "BlockBlob" + PAGEBLOB = "PageBlob" + APPENDBLOB = "AppendBlob" + + +class BlockState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Block blob block types.""" - Committed = 'Committed' #: Committed blocks. - Latest = 'Latest' #: Latest blocks. - Uncommitted = 'Uncommitted' #: Uncommitted blocks. + COMMITTED = 'Committed' #: Committed blocks. + LATEST = 'Latest' #: Latest blocks. + UNCOMMITTED = 'Uncommitted' #: Uncommitted blocks. -class StandardBlobTier(str, Enum): +class StandardBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ Specifies the blob tier to set the blob to. This is only applicable for block blobs on standard storage accounts. """ - Archive = 'Archive' #: Archive - Cool = 'Cool' #: Cool - Hot = 'Hot' #: Hot + ARCHIVE = 'Archive' #: Archive + COOL = 'Cool' #: Cool + COLD = 'Cold' #: Cold + HOT = 'Hot' #: Hot -class PremiumPageBlobTier(str, Enum): +class PremiumPageBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. Please take a look at: @@ -60,6 +102,7 @@ class PremiumPageBlobTier(str, Enum): P4 = 'P4' #: P4 Tier P6 = 'P6' #: P6 Tier P10 = 'P10' #: P10 Tier + P15 = 'P15' #: P15 Tier P20 = 'P20' #: P20 Tier P30 = 'P30' #: P30 Tier P40 = 'P40' #: P40 Tier @@ -67,26 +110,34 @@ class PremiumPageBlobTier(str, Enum): P60 = 'P60' #: P60 Tier -class SequenceNumberAction(str, Enum): +class QuickQueryDialect(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the quick query input/output dialect.""" + + DELIMITEDTEXT = 'DelimitedTextDialect' + DELIMITEDJSON = 'DelimitedJsonDialect' + PARQUET = 'ParquetDialect' + + +class SequenceNumberAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Sequence number actions.""" - Increment = 'increment' + INCREMENT = 'increment' """ Increments the value of the sequence number by 1. If specifying this option, do not include the x-ms-blob-sequence-number header. 
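The new `parse_page_list` above merges the service's separate page-range and clear-range lists into one start-sorted list, tagging each entry with a `cleared` flag. A standalone illustration of the two-pointer merge, using simple stand-ins for the generated `PageList`/`PageRange` models:

```python
# Two-pointer merge of written and cleared page ranges, as in parse_page_list.
from dataclasses import dataclass

@dataclass
class _Range:  # stand-in for the generated PageRange/ClearRange models
    start: int
    end: int

page_ranges = [_Range(0, 511), _Range(1024, 1535)]  # written pages
clear_ranges = [_Range(512, 1023)]                  # cleared pages

merged, p, c = [], 0, 0
while p < len(page_ranges) and c < len(clear_ranges):
    if page_ranges[p].start < clear_ranges[c].start:
        merged.append((page_ranges[p].start, page_ranges[p].end, False))
        p += 1
    else:
        merged.append((clear_ranges[c].start, clear_ranges[c].end, True))
        c += 1
merged += [(r.start, r.end, False) for r in page_ranges[p:]]
merged += [(r.start, r.end, True) for r in clear_ranges[c:]]

assert merged == [(0, 511, False), (512, 1023, True), (1024, 1535, False)]
```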
""" - Max = 'max' + MAX = 'max' """ Sets the sequence number to be the higher of the value included with the request and the value currently stored for the blob. """ - Update = 'update' + UPDATE = 'update' """Sets the sequence number to the value included with the request.""" -class PublicAccess(str, Enum): +class PublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ Specifies whether data in the container may be accessed publicly and the level of access. """ @@ -97,14 +148,14 @@ class PublicAccess(str, Enum): Clients cannot enumerate the containers within the storage account as well as the blobs within the container. """ - Blob = 'blob' + BLOB = 'blob' """ Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. """ - Container = 'container' + CONTAINER = 'container' """ Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers @@ -112,6 +163,48 @@ class PublicAccess(str, Enum): """ +class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ + Specifies the immutability policy mode to set on the blob. + "Mutable" can only be returned by service, don't set to "Mutable". + """ + + UNLOCKED = "Unlocked" + LOCKED = "Locked" + MUTABLE = "Mutable" + + +class RetentionPolicy(GeneratedRetentionPolicy): + """The retention policy which determines how long the associated data should + persist. + + :param bool enabled: + Indicates whether a retention policy is enabled for the storage service. + The default value is False. + :param Optional[int] days: + Indicates the number of days that metrics or logging or + soft-deleted data should be retained. All data older than this value will + be deleted. If enabled=True, the number of days must be specified. + """ + + enabled: bool = False + days: Optional[int] = None + + def __init__(self, enabled: bool = False, days: Optional[int] = None) -> None: + super(RetentionPolicy, self).__init__(enabled=enabled, days=days, allow_permanent_delete=None) + if self.enabled and (self.days is None): + raise ValueError("If policy is enabled, 'days' must be specified.") + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + enabled=generated.enabled, + days=generated.days, + ) + + class BlobAnalyticsLogging(GeneratedLogging): """Azure Analytics Logging settings. @@ -128,8 +221,19 @@ class BlobAnalyticsLogging(GeneratedLogging): policy will be disabled by default. """ - def __init__(self, **kwargs): - self.version = kwargs.get('version', u'1.0') + version: str = '1.0' + """The version of Storage Analytics to configure.""" + delete: bool = False + """Indicates whether all delete requests should be logged.""" + read: bool = False + """Indicates whether all read requests should be logged.""" + write: bool = False + """Indicates whether all write requests should be logged.""" + retention_policy: RetentionPolicy = RetentionPolicy() + """Determines how long the associated data should persist.""" + + def __init__(self, **kwargs: Any) -> None: + self.version = kwargs.get('version', '1.0') self.delete = kwargs.get('delete', False) self.read = kwargs.get('read', False) self.write = kwargs.get('write', False) @@ -164,8 +268,17 @@ class Metrics(GeneratedMetrics): policy will be disabled by default. 
""" - def __init__(self, **kwargs): - self.version = kwargs.get('version', u'1.0') + version: str = '1.0' + """The version of Storage Analytics to configure.""" + enabled: bool = False + """Indicates whether metrics are enabled for the Blob service.""" + include_apis: Optional[bool] + """Indicates whether metrics should generate summary statistics for called API operations.""" + retention_policy: RetentionPolicy = RetentionPolicy() + """Determines how long the associated data should persist.""" + + def __init__(self, **kwargs: Any) -> None: + self.version = kwargs.get('version', '1.0') self.enabled = kwargs.get('enabled', False) self.include_apis = kwargs.get('include_apis') self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() @@ -182,34 +295,6 @@ def _from_generated(cls, generated): ) -class RetentionPolicy(GeneratedRetentionPolicy): - """The retention policy which determines how long the associated data should - persist. - - :param bool enabled: - Indicates whether a retention policy is enabled for the storage service. - The default value is False. - :param int days: - Indicates the number of days that metrics or logging or - soft-deleted data should be retained. All data older than this value will - be deleted. If enabled=True, the number of days must be specified. - """ - - def __init__(self, enabled=False, days=None): - super(RetentionPolicy, self).__init__(enabled=enabled, days=days, allow_permanent_delete=None) - if self.enabled and (self.days is None): - raise ValueError("If policy is enabled, 'days' must be specified.") - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - enabled=generated.enabled, - days=generated.days, - ) - - class StaticWebsite(GeneratedStaticWebsite): """The properties that enable an account to host a static website. @@ -224,7 +309,16 @@ class StaticWebsite(GeneratedStaticWebsite): Absolute path of the default index page. """ - def __init__(self, **kwargs): + enabled: bool = False + """Indicates whether this account is hosting a static website.""" + index_document: Optional[str] + """The default name of the index page under each directory.""" + error_document404_path: Optional[str] + """The absolute path of the custom 404 page.""" + default_index_document_path: Optional[str] + """Absolute path of the default index page.""" + + def __init__(self, **kwargs: Any) -> None: self.enabled = kwargs.get('enabled', False) if self.enabled: self.index_document = kwargs.get('index_document') @@ -275,13 +369,45 @@ class CorsRule(GeneratedCorsRule): preflight response. 
""" - def __init__(self, allowed_origins, allowed_methods, **kwargs): + allowed_origins: str + """The comma-delimited string representation of the list of origin domains that will be allowed via + CORS, or "*" to allow all domains.""" + allowed_methods: str + """The comma-delimited string representation of the list HTTP methods that are allowed to be executed + by the origin.""" + exposed_headers: str + """The comma-delimited string representation of the list of response headers to expose to CORS clients.""" + allowed_headers: str + """The comma-delimited string representation of the list of headers allowed to be part of the cross-origin + request.""" + max_age_in_seconds: int + """The number of seconds that the client/browser should cache a pre-flight response.""" + + def __init__(self, allowed_origins: List[str], allowed_methods: List[str], **kwargs: Any) -> None: self.allowed_origins = ','.join(allowed_origins) self.allowed_methods = ','.join(allowed_methods) self.allowed_headers = ','.join(kwargs.get('allowed_headers', [])) self.exposed_headers = ','.join(kwargs.get('exposed_headers', [])) self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0) + @staticmethod + def _to_generated(rules: Optional[List["CorsRule"]]) -> Optional[List[GeneratedCorsRule]]: + if rules is None: + return rules + + generated_cors_list = [] + for cors_rule in rules: + generated_cors = GeneratedCorsRule( + allowed_origins=cors_rule.allowed_origins, + allowed_methods=cors_rule.allowed_methods, + allowed_headers=cors_rule.allowed_headers, + exposed_headers=cors_rule.exposed_headers, + max_age_in_seconds=cors_rule.max_age_in_seconds + ) + generated_cors_list.append(generated_cors) + + return generated_cors_list + @classmethod def _from_generated(cls, generated): return cls( @@ -298,39 +424,46 @@ class ContainerProperties(DictMixin): Returned ``ContainerProperties`` instances expose these values through a dictionary interface, for example: ``container_props["last_modified"]``. - Additionally, the container name is available as ``container_props["name"]``. - - :ivar ~datetime.datetime last_modified: - A datetime object representing the last time the container was modified. - :ivar str etag: - The ETag contains a value that you can use to perform operations - conditionally. - :ivar ~azure.storage.blob.LeaseProperties lease: - Stores all the lease information for the container. - :ivar str public_access: Specifies whether data in the container may be accessed - publicly and the level of access. - :ivar bool has_immutability_policy: - Represents whether the container has an immutability policy. - :ivar bool has_legal_hold: - Represents whether the container has a legal hold. - :ivar dict metadata: A dict with name-value pairs to associate with the - container as metadata. - :ivar ~azure.storage.blob.ContainerEncryptionScope encryption_scope: - The default encryption scope configuration for the container. 
- """ - - def __init__(self, **kwargs): - self.name = None - self.last_modified = kwargs.get('Last-Modified') - self.etag = kwargs.get('ETag') + Additionally, the container name is available as ``container_props["name"]``.""" + + name: str + """Name of the container.""" + last_modified: "datetime" + """A datetime object representing the last time the container was modified.""" + etag: str + """The ETag contains a value that you can use to perform operations conditionally.""" + lease: "LeaseProperties" + """Stores all the lease information for the container.""" + public_access: Optional[str] + """Specifies whether data in the container may be accessed publicly and the level of access.""" + has_immutability_policy: bool + """Represents whether the container has an immutability policy.""" + has_legal_hold: bool + """Represents whether the container has a legal hold.""" + immutable_storage_with_versioning_enabled: bool + """Represents whether immutable storage with versioning enabled on the container.""" + metadata: Dict[str, Any] + """A dict with name-value pairs to associate with the container as metadata.""" + encryption_scope: Optional["ContainerEncryptionScope"] + """The default encryption scope configuration for the container.""" + deleted: Optional[bool] + """Whether this container was deleted.""" + version: Optional[str] + """The version of a deleted container.""" + + def __init__(self, **kwargs: Any) -> None: + self.name = None # type: ignore [assignment] + self.last_modified = kwargs.get('Last-Modified') # type: ignore [assignment] + self.etag = kwargs.get('ETag') # type: ignore [assignment] self.lease = LeaseProperties(**kwargs) self.public_access = kwargs.get('x-ms-blob-public-access') - self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy') + self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy') # type: ignore [assignment] self.deleted = None self.version = None - self.has_legal_hold = kwargs.get('x-ms-has-legal-hold') - self.metadata = kwargs.get('metadata') + self.has_legal_hold = kwargs.get('x-ms-has-legal-hold') # type: ignore [assignment] + self.metadata = kwargs.get('metadata') # type: ignore [assignment] self.encryption_scope = None + self.immutable_storage_with_versioning_enabled = kwargs.get('x-ms-immutable-storage-with-versioning-enabled') # type: ignore [assignment] # pylint: disable=name-too-long default_encryption_scope = kwargs.get('x-ms-default-encryption-scope') if default_encryption_scope: self.encryption_scope = ContainerEncryptionScope( @@ -347,6 +480,7 @@ def _from_generated(cls, generated): props.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access props.public_access = generated.properties.public_access props.has_immutability_policy = generated.properties.has_immutability_policy + props.immutable_storage_with_versioning_enabled = generated.properties.is_immutable_storage_with_versioning_enabled # pylint: disable=line-too-long, name-too-long props.deleted = generated.deleted props.version = generated.version props.has_legal_hold = generated.properties.has_legal_hold @@ -358,24 +492,34 @@ def _from_generated(cls, generated): class ContainerPropertiesPaged(PageIterator): """An Iterable of Container properties. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A container name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. 
- :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.ContainerProperties) - - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only containers whose names + :param Callable command: Function to retrieve the next page of items. + :param Optional[str] prefix: Filters the results to return only containers whose names begin with the specified prefix. - :param int results_per_page: The maximum number of container names to retrieve per - call. - :param str continuation_token: An opaque continuation token. + :param Optional[int] results_per_page: The maximum number of container names to retrieve per call. + :param Optional[str] continuation_token: An opaque continuation token. """ - def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A container name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results.""" + current_page: List["ContainerProperties"] + """The current page of listed results.""" + + def __init__( + self, command: Callable, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None + ) -> None: super(ContainerPropertiesPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -414,177 +558,64 @@ def _build_item(item): return ContainerProperties._from_generated(item) # pylint: disable=protected-access -class BlobProperties(DictMixin): - """ - Blob Properties. - - :ivar str name: - The name of the blob. - :ivar str container: - The container in which the blob resides. - :ivar str snapshot: - Datetime value that uniquely identifies the blob snapshot. - :ivar ~azure.blob.storage.BlobType blob_type: - String indicating this blob's type. - :ivar dict metadata: - Name-value pairs associated with the blob as metadata. - :ivar ~datetime.datetime last_modified: - A datetime object representing the last time the blob was modified. - :ivar str etag: - The ETag contains a value that you can use to perform operations - conditionally. - :ivar int size: - The size of the content returned. If the entire blob was requested, - the length of blob in bytes. If a subset of the blob was requested, the - length of the returned subset. - :ivar str content_range: - Indicates the range of bytes returned in the event that the client - requested a subset of the blob. - :ivar int append_blob_committed_block_count: - (For Append Blobs) Number of committed blocks in the blob. - :ivar bool is_append_blob_sealed: - Indicate if the append blob is sealed or not. - - .. versionadded:: 12.4.0 - - :ivar int page_blob_sequence_number: - (For Page Blobs) Sequence number for page blob used for coordinating - concurrent writes. - :ivar bool server_encrypted: - Set to true if the blob is encrypted on the server. 
- :ivar ~azure.storage.blob.CopyProperties copy: - Stores all the copy properties for the blob. - :ivar ~azure.storage.blob.ContentSettings content_settings: - Stores all the content settings for the blob. - :ivar ~azure.storage.blob.LeaseProperties lease: - Stores all the lease information for the blob. - :ivar ~azure.storage.blob.StandardBlobTier blob_tier: - Indicates the access tier of the blob. The hot tier is optimized - for storing data that is accessed frequently. The cool storage tier - is optimized for storing data that is infrequently accessed and stored - for at least a month. The archive tier is optimized for storing - data that is rarely accessed and stored for at least six months - with flexible latency requirements. - :ivar str rehydrate_priority: - Indicates the priority with which to rehydrate an archived blob - :ivar ~datetime.datetime blob_tier_change_time: - Indicates when the access tier was last changed. - :ivar bool blob_tier_inferred: - Indicates whether the access tier was inferred by the service. - If false, it indicates that the tier was set explicitly. - :ivar bool deleted: - Whether this blob was deleted. - :ivar ~datetime.datetime deleted_time: - A datetime object representing the time at which the blob was deleted. - :ivar int remaining_retention_days: - The number of days that the blob will be retained before being permanently deleted by the service. - :ivar ~datetime.datetime creation_time: - Indicates when the blob was created, in UTC. - :ivar str archive_status: - Archive status of blob. - :ivar str encryption_key_sha256: - The SHA-256 hash of the provided encryption key. - :ivar str encryption_scope: - A predefined encryption scope used to encrypt the data on the service. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - :ivar bool request_server_encrypted: - Whether this blob is encrypted. - :ivar list(~azure.storage.blob.ObjectReplicationPolicy) object_replication_source_properties: - Only present for blobs that have policy ids and rule ids applied to them. - - .. versionadded:: 12.4.0 - - :ivar str object_replication_destination_policy: - Represents the Object Replication Policy Id that created this blob. - - .. versionadded:: 12.4.0 +class ImmutabilityPolicy(DictMixin): + """Optional parameters for setting the immutability policy of a blob, blob snapshot or blob version. - :ivar ~datetime.datetime last_accessed_on: - Indicates when the last Read/Write operation was performed on a Blob. + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. - .. versionadded:: 12.6.0 - - :ivar int tag_count: - Tags count on this blob. + :keyword ~datetime.datetime expiry_time: + Specifies the date time when the blobs immutability policy is set to expire. + :keyword str or ~azure.storage.blob.BlobImmutabilityPolicyMode policy_mode: + Specifies the immutability policy mode to set on the blob. + Possible values to set include: "Locked", "Unlocked". + "Mutable" can only be returned by service, don't set to "Mutable". + """ - .. 
versionadded:: 12.4.0 + expiry_time: Optional["datetime"] = None + """Specifies the date time when the blobs immutability policy is set to expire.""" + policy_mode: Optional[str] = None + """Specifies the immutability policy mode to set on the blob.""" - :ivar dict(str, str) tags: - Key value pair of tags on this blob. + def __init__(self, **kwargs: Any) -> None: + self.expiry_time = kwargs.pop('expiry_time', None) + self.policy_mode = kwargs.pop('policy_mode', None) - .. versionadded:: 12.4.0 + @classmethod + def _from_generated(cls, generated): + immutability_policy = cls() + immutability_policy.expiry_time = generated.properties.immutability_policy_expires_on + immutability_policy.policy_mode = generated.properties.immutability_policy_mode + return immutability_policy - """ - def __init__(self, **kwargs): - self.name = kwargs.get('name') - self.container = None - self.snapshot = kwargs.get('x-ms-snapshot') - self.version_id = kwargs.get('x-ms-version-id') - self.is_current_version = kwargs.get('x-ms-is-current-version') - self.blob_type = BlobType(kwargs['x-ms-blob-type']) if kwargs.get('x-ms-blob-type') else None - self.metadata = kwargs.get('metadata') - self.encrypted_metadata = kwargs.get('encrypted_metadata') - self.last_modified = kwargs.get('Last-Modified') - self.etag = kwargs.get('ETag') - self.size = kwargs.get('Content-Length') - self.content_range = kwargs.get('Content-Range') - self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count') - self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed') - self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number') - self.server_encrypted = kwargs.get('x-ms-server-encrypted') - self.copy = CopyProperties(**kwargs) - self.content_settings = ContentSettings(**kwargs) - self.lease = LeaseProperties(**kwargs) - self.blob_tier = kwargs.get('x-ms-access-tier') - self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority') - self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time') - self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred') - self.deleted = False - self.deleted_time = None - self.remaining_retention_days = None - self.creation_time = kwargs.get('x-ms-creation-time') - self.archive_status = kwargs.get('x-ms-archive-status') - self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256') - self.encryption_scope = kwargs.get('x-ms-encryption-scope') - self.request_server_encrypted = kwargs.get('x-ms-server-encrypted') - self.object_replication_source_properties = kwargs.get('object_replication_source_properties') - self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id') - self.last_accessed_on = kwargs.get('x-ms-last-access-time') - self.tag_count = kwargs.get('x-ms-tag-count') - self.tags = None +class FilteredBlob(DictMixin): + """Blob info from a Filter Blobs API call.""" + name: str + """Blob name""" + container_name: Optional[str] + """Container name.""" + tags: Optional[Dict[str, str]] + """Key value pairs of blob tags.""" -class FilteredBlob(DictMixin): - """Blob info from a Filter Blobs API call. - - :ivar name: Blob name - :type name: str - :ivar container_name: Container name. - :type container_name: str - :ivar tags: Key value pairs of blob tags. 
- :type tags: Dict[str, str] - """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.name = kwargs.get('name', None) self.container_name = kwargs.get('container_name', None) self.tags = kwargs.get('tags', None) class LeaseProperties(DictMixin): - """Blob Lease Properties. - - :ivar str status: - The lease status of the blob. Possible values: locked|unlocked - :ivar str state: - Lease state of the blob. Possible values: available|leased|expired|breaking|broken - :ivar str duration: - When a blob is leased, specifies whether the lease is of infinite or fixed duration. - """ + """Blob Lease Properties.""" + + status: str + """The lease status of the blob. Possible values: locked|unlocked""" + state: str + """Lease state of the blob. Possible values: available|leased|expired|breaking|broken""" + duration: Optional[str] + """When a blob is leased, specifies whether the lease is of infinite or fixed duration.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.status = get_enum_value(kwargs.get('x-ms-lease-status')) self.state = get_enum_value(kwargs.get('x-ms-lease-state')) self.duration = get_enum_value(kwargs.get('x-ms-lease-duration')) @@ -601,33 +632,51 @@ def _from_generated(cls, generated): class ContentSettings(DictMixin): """The content settings of a blob. - :param str content_type: + :param Optional[str] content_type: The content type specified for the blob. If no content type was specified, the default content type is application/octet-stream. - :param str content_encoding: + :param Optional[str] content_encoding: If the content_encoding has previously been set for the blob, that value is stored. - :param str content_language: + :param Optional[str] content_language: If the content_language has previously been set for the blob, that value is stored. - :param str content_disposition: + :param Optional[str] content_disposition: content_disposition conveys additional information about how to process the response payload, and also can be used to attach additional metadata. If content_disposition has previously been set for the blob, that value is stored. - :param str cache_control: + :param Optional[str] cache_control: If the cache_control has previously been set for the blob, that value is stored. - :param str content_md5: + :param Optional[bytearray] content_md5: If the content_md5 has been set for the blob, this response header is stored so that the client can check for message content integrity. 
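`ContentSettings` above now documents every field as optional and `content_md5` as a `bytearray`; the `or kwargs.get('Header-Name')` fallbacks in its constructor let the same class be hydrated straight from response headers. A sketch of both construction paths, again via the public package's matching class:

```python
# Two ways ContentSettings gets built: explicitly for uploads, or from raw
# response headers through the kwargs fallbacks shown in the diff.
from azure.storage.blob import ContentSettings

upload_settings = ContentSettings(content_type="application/json",
                                  cache_control="no-cache")
assert upload_settings.content_type == "application/json"

header_settings = ContentSettings(**{"Content-Type": "text/plain",
                                     "Content-Encoding": "gzip"})
assert header_settings.content_encoding == "gzip"
```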
""" + content_type: Optional[str] = None + """The content type specified for the blob.""" + content_encoding: Optional[str] = None + """The content encoding specified for the blob.""" + content_language: Optional[str] = None + """The content language specified for the blob.""" + content_disposition: Optional[str] = None + """The content disposition specified for the blob.""" + cache_control: Optional[str] = None + """The cache control specified for the blob.""" + content_md5: Optional[bytearray] = None + """The content md5 specified for the blob.""" + def __init__( - self, content_type=None, content_encoding=None, - content_language=None, content_disposition=None, - cache_control=None, content_md5=None, **kwargs): + self, content_type: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_disposition: Optional[str] = None, + cache_control: Optional[str] = None, + content_md5: Optional[bytearray] = None, + **kwargs: Any + ) -> None: self.content_type = content_type or kwargs.get('Content-Type') self.content_encoding = content_encoding or kwargs.get('Content-Encoding') @@ -654,51 +703,47 @@ class CopyProperties(DictMixin): These properties will be `None` if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation, for example, using Set Blob Properties, Upload Blob, or Commit Block List. + """ - :ivar str id: - String identifier for the last attempted Copy Blob operation where this blob - was the destination blob. - :ivar str source: - URL up to 2 KB in length that specifies the source blob used in the last attempted - Copy Blob operation where this blob was the destination blob. - :ivar str status: - State of the copy operation identified by Copy ID, with these values: - success: - Copy completed successfully. - pending: - Copy is in progress. Check copy_status_description if intermittent, - non-fatal errors impede copy progress but don't cause failure. - aborted: - Copy was ended by Abort Copy Blob. - failed: - Copy failed. See copy_status_description for failure details. - :ivar str progress: - Contains the number of bytes copied and the total bytes in the source in the last + id: Optional[str] + """String identifier for the last attempted Copy Blob operation where this blob + was the destination blob.""" + source: Optional[str] + """URL up to 2 KB in length that specifies the source blob used in the last attempted + Copy Blob operation where this blob was the destination blob.""" + status: Optional[str] + """State of the copy operation identified by Copy ID, with these values: + success: Copy completed successfully. + pending: Copy is in progress. Check copy_status_description if intermittent, non-fatal errors impede copy progress + but don't cause failure. + aborted: Copy was ended by Abort Copy Blob. + failed: Copy failed. See copy_status_description for failure details.""" + progress: Optional[str] + """Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show - between 0 and Content-Length bytes copied. - :ivar ~datetime.datetime completion_time: - Conclusion time of the last attempted Copy Blob operation where this blob was the + between 0 and Content-Length bytes copied.""" + completion_time: Optional["datetime"] + """Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. 
This value can specify the time of a completed, aborted, or - failed copy attempt. - :ivar str status_description: - Only appears when x-ms-copy-status is failed or pending. Describes cause of fatal - or non-fatal copy operation failure. - :ivar bool incremental_copy: - Copies the snapshot of the source page blob to a destination page blob. + failed copy attempt.""" + status_description: Optional[str] + """Only appears when x-ms-copy-status is failed or pending. Describes cause of fatal + or non-fatal copy operation failure.""" + incremental_copy: Optional[bool] + """Copies the snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between - the previously copied snapshot are transferred to the destination - :ivar ~datetime.datetime destination_snapshot: - Included if the blob is incremental copy blob or incremental copy snapshot, + the previously copied snapshot are transferred to the destination.""" + destination_snapshot: Optional["datetime"] + """Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful - incremental copy snapshot for this blob. - """ + incremental copy snapshot for this blob.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.id = kwargs.get('x-ms-copy-id') self.source = kwargs.get('x-ms-copy-source') self.status = get_enum_value(kwargs.get('x-ms-copy-status')) self.progress = kwargs.get('x-ms-copy-progress') - self.completion_time = kwargs.get('x-ms-copy-completion_time') + self.completion_time = kwargs.get('x-ms-copy-completion-time') self.status_description = kwargs.get('x-ms-copy-status-description') self.incremental_copy = kwargs.get('x-ms-incremental-copy') self.destination_snapshot = kwargs.get('x-ms-copy-destination-snapshot') @@ -722,20 +767,33 @@ class BlobBlock(DictMixin): :param str block_id: Block id. - :param str state: - Block state. Possible values: committed|uncommitted - :ivar int size: - Block size in bytes. + :param BlockState state: + Block state. Possible values: BlockState.COMMITTED | BlockState.UNCOMMITTED """ - def __init__(self, block_id, state=BlockState.Latest): + block_id: str + """Block id.""" + state: BlockState + """Block state.""" + size: int + """Block size.""" + + def __init__(self, block_id: str, state: BlockState = BlockState.LATEST) -> None: self.id = block_id self.state = state - self.size = None + self.size = None # type: ignore [assignment] @classmethod def _from_generated(cls, generated): - block = cls(decode_base64_to_text(generated.name)) + try: + decoded_bytes = decode_base64_to_bytes(generated.name) + block_id = decoded_bytes.decode('utf-8') + # This handles a bug: block ids staged through upload_blob for large blocks are not base64 encoded, + # while the service expects base64-encoded block ids. So if the returned block id cannot be base64 + # decoded, it was never encoded at staging time and the returned block_id is used directly. + except UnicodeDecodeError: + block_id = generated.name + block = cls(block_id) block.size = generated.size return block @@ -749,58 +807,53 @@ class PageRange(DictMixin): End of page range in bytes.
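The `BlobBlock._from_generated` change above decodes the block name as base64 but falls back to the raw name when decoding fails, since ids staged by `upload_blob` for large blocks may never have been encoded. The fallback in isolation, with the standard `base64` module standing in for the vendored `decode_base64_to_bytes`:

```python
# Fallback sketch: use the decoded id when the name round-trips as
# base64 UTF-8, otherwise keep the service-returned name as the block id.
import base64
import binascii

def block_id_from_name(name: str) -> str:
    try:
        return base64.b64decode(name).decode("utf-8")
    except (binascii.Error, UnicodeDecodeError):
        return name  # was never base64-encoded at staging time

assert block_id_from_name(base64.b64encode(b"block-0001").decode()) == "block-0001"
assert block_id_from_name("block-0001") == "block-0001"  # raw id passes through
```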
""" - def __init__(self, start=None, end=None): + start: Optional[int] = None + """Start of page range in bytes.""" + end: Optional[int] = None + """End of page range in bytes.""" + cleared: bool + """Whether the range has been cleared.""" + + def __init__(self, start: Optional[int] = None, end: Optional[int] = None, *, cleared: bool = False) -> None: self.start = start self.end = end + self.cleared = cleared -class AccessPolicy(GenAccessPolicy): - """Access Policy class used by the set and get access policy methods in each service. +class PageRangePaged(PageIterator): + def __init__(self, command, results_per_page=None, continuation_token=None): + super(PageRangePaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] - A stored access policy can specify the start time, expiry time, and - permissions for the Shared Access Signatures with which it's associated. - Depending on how you want to control access to your resource, you can - specify all of these parameters within the stored access policy, and omit - them from the URL for the Shared Access Signature. Doing so permits you to - modify the associated signature's behavior at any time, as well as to revoke - it. Or you can specify one or more of the access policy parameters within - the stored access policy, and the others on the URL. Finally, you can - specify all of the parameters on the URL. In this case, you can use the - stored access policy to revoke the signature, but not to modify its behavior. + def _get_next_cb(self, continuation_token): + try: + return self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) - Together the Shared Access Signature and the stored access policy must - include all fields required to authenticate the signature. If any required - fields are missing, the request will fail. Likewise, if a field is specified - both in the Shared Access Signature URL and in the stored access policy, the - request will fail with status code 400 (Bad Request). + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.current_page = self._build_page(self._response) - :param permission: - The permissions associated with the shared access signature. The - user is restricted to operations allowed by the permissions. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. - :type permission: str or ~azure.storage.blob.ContainerSasPermissions - :param expiry: - The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. - :type expiry: ~datetime.datetime or str - :param start: - The time at which the shared access signature becomes valid. If - omitted, start time for this call is assumed to be the time when the - storage service receives the request. 
Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. - :type start: ~datetime.datetime or str - """ - def __init__(self, permission=None, expiry=None, start=None): - self.start = start - self.expiry = expiry - self.permission = permission + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_page(response): + if not response: + raise StopIteration + + return parse_page_list(response) class ContainerSasPermissions(object): @@ -828,26 +881,92 @@ class ContainerSasPermissions(object): List blobs in the container. :param bool tag: Set or get tags on the blobs in the container. + :keyword bool add: + Add a block to an append blob. + :keyword bool create: + Write a new blob, snapshot a blob, or copy a blob to a new blob. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + :keyword bool filter_by_tags: + To enable finding blobs by tags. + :keyword bool move: + Move a blob or a directory and its contents to a new location. + :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. """ - def __init__(self, read=False, write=False, delete=False, list=False, delete_previous_version=False, tag=False): # pylint: disable=redefined-builtin + + read: bool = False + """The read permission for container SAS.""" + write: bool = False + """The write permission for container SAS.""" + delete: bool = False + """The delete permission for container SAS.""" + delete_previous_version: bool = False + """Permission to delete previous blob version for versioning enabled + storage accounts.""" + list: bool = False + """The list permission for container SAS.""" + tag: bool = False + """Set or get tags on the blobs in the container.""" + add: Optional[bool] + """Add a block to an append blob.""" + create: Optional[bool] + """Write a new blob, snapshot a blob, or copy a blob to a new blob.""" + permanent_delete: Optional[bool] + """To enable permanent delete on the blob is permitted.""" + move: Optional[bool] + """Move a blob or a directory and its contents to a new location.""" + execute: Optional[bool] + """Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob.""" + set_immutability_policy: Optional[bool] + """To get immutability policy, you just need read permission.""" + + def __init__( + self, read: bool = False, + write: bool = False, + delete: bool = False, + list: bool = False, + delete_previous_version: bool = False, + tag: bool = False, + **kwargs: Any + ) -> None: self.read = read + self.add = kwargs.pop('add', False) + self.create = kwargs.pop('create', False) self.write = write self.delete = delete - self.list = list self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.list = list self.tag = tag + self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + ('w' if self.write else '') + ('d' if self.delete 
else '') + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + ('l' if self.list else '') + - ('t' if self.tag else '')) + ('t' if self.tag else '') + + ('f' if self.filter_by_tags else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) def __str__(self): return self._str @classmethod - def from_string(cls, permission): + def from_string(cls, permission: str) -> "ContainerSasPermissions": """Create a ContainerSasPermissions from a string. To specify read, write, delete, or list permissions you need only to @@ -860,17 +979,88 @@ def from_string(cls, permission): :rtype: ~azure.storage.blob.ContainerSasPermissions """ p_read = 'r' in permission + p_add = 'a' in permission + p_create = 'c' in permission p_write = 'w' in permission p_delete = 'd' in permission - p_list = 'l' in permission p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_list = 'l' in permission p_tag = 't' in permission + p_filter_by_tags = 'f' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, list=p_list, - delete_previous_version=p_delete_previous_version, tag=p_tag) + delete_previous_version=p_delete_previous_version, tag=p_tag, add=p_add, + create=p_create, permanent_delete=p_permanent_delete, filter_by_tags=p_filter_by_tags, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) return parsed +class AccessPolicy(GenAccessPolicy): + """Access Policy class used by the set and get access policy methods in each service. + + A stored access policy can specify the start time, expiry time, and + permissions for the Shared Access Signatures with which it's associated. + Depending on how you want to control access to your resource, you can + specify all of these parameters within the stored access policy, and omit + them from the URL for the Shared Access Signature. Doing so permits you to + modify the associated signature's behavior at any time, as well as to revoke + it. Or you can specify one or more of the access policy parameters within + the stored access policy, and the others on the URL. Finally, you can + specify all of the parameters on the URL. In this case, you can use the + stored access policy to revoke the signature, but not to modify its behavior. + + Together the Shared Access Signature and the stored access policy must + include all fields required to authenticate the signature. If any required + fields are missing, the request will fail. Likewise, if a field is specified + both in the Shared Access Signature URL and in the stored access policy, the + request will fail with status code 400 (Bad Request). + + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: Optional[Union[ContainerSasPermissions, str]] + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. 
Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :paramtype expiry: Optional[Union[str, datetime]] + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :paramtype start: Optional[Union[str, datetime]] + """ + + permission: Optional[Union[ContainerSasPermissions, str]] # type: ignore [assignment] + """The permissions associated with the shared access signature. The user is restricted to + operations allowed by the permissions.""" + expiry: Optional[Union["datetime", str]] # type: ignore [assignment] + """The time at which the shared access signature becomes invalid.""" + start: Optional[Union["datetime", str]] # type: ignore [assignment] + """The time at which the shared access signature becomes valid.""" + + def __init__( + self, permission: Optional[Union["ContainerSasPermissions", str]] = None, + expiry: Optional[Union[str, "datetime"]] = None, + start: Optional[Union[str, "datetime"]] = None + ) -> None: + self.start = start + self.expiry = expiry + self.permission = permission + + class BlobSasPermissions(object): """BlobSasPermissions class to be used with the :func:`~azure.storage.blob.generate_blob_sas` function. @@ -892,29 +1082,81 @@ class BlobSasPermissions(object): Delete the previous blob version for the versioning enabled storage account. :param bool tag: Set or get tags on the blob. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + :keyword bool move: + Move a blob or a directory and its contents to a new location. + :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. 
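+
+    Example (an illustrative sketch; the letters map to flags exactly as built in ``_str`` below):
+
+        .. code-block:: python
+
+            p = BlobSasPermissions(read=True, write=True, delete_previous_version=True)
+            str(p)  # 'rwx'
+            BlobSasPermissions.from_string('rwx').delete_previous_version  # True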
""" - def __init__(self, read=False, add=False, create=False, write=False, - delete=False, delete_previous_version=False, tag=True): + + read: bool = False + """The read permission for Blob SAS.""" + add: Optional[bool] + """The add permission for Blob SAS.""" + create: Optional[bool] + """Write a new blob, snapshot a blob, or copy a blob to a new blob.""" + write: bool = False + """The write permission for Blob SAS.""" + delete: bool = False + """The delete permission for Blob SAS.""" + delete_previous_version: bool = False + """Permission to delete previous blob version for versioning enabled + storage accounts.""" + tag: bool = False + """Set or get tags on the blobs in the Blob.""" + permanent_delete: Optional[bool] + """To enable permanent delete on the blob is permitted.""" + move: Optional[bool] + """Move a blob or a directory and its contents to a new location.""" + execute: Optional[bool] + """Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob.""" + set_immutability_policy: Optional[bool] + """To get immutability policy, you just need read permission.""" + + def __init__( + self, read: bool = False, + add: bool = False, + create: bool = False, + write: bool = False, + delete: bool = False, + delete_previous_version: bool = False, + tag: bool = False, + **kwargs: Any + ) -> None: self.read = read self.add = add self.create = create self.write = write self.delete = delete self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) self.tag = tag + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('a' if self.add else '') + ('c' if self.create else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('t' if self.tag else '')) + ('y' if self.permanent_delete else '') + + ('t' if self.tag else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) def __str__(self): return self._str @classmethod - def from_string(cls, permission): + def from_string(cls, permission: str) -> "BlobSasPermissions": """Create a BlobSasPermissions from a string. To specify read, add, create, write, or delete permissions you need only to @@ -932,10 +1174,15 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission p_tag = 't' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, add=p_add, create=p_create, write=p_write, delete=p_delete, - delete_previous_version=p_delete_previous_version, tag=p_tag) + delete_previous_version=p_delete_previous_version, tag=p_tag, permanent_delete=p_permanent_delete, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) return parsed @@ -959,10 +1206,16 @@ class CustomerProvidedEncryptionKey(object): Base64-encoded AES-256 encryption key value. :param str key_hash: Base64-encoded SHA256 of the encryption key. - :ivar str algorithm: - Specifies the algorithm to use when encrypting data using the given key. Must be AES256. 
""" - def __init__(self, key_value, key_hash): + + key_value: str + """Base64-encoded AES-256 encryption key value.""" + key_hash: str + """Base64-encoded SHA256 of the encryption key.""" + algorithm: str + """Specifies the algorithm to use when encrypting data using the given key. Must be AES256.""" + + def __init__(self, key_value: str, key_hash: str) -> None: self.key_value = key_value self.key_hash = key_hash self.algorithm = 'AES256' @@ -984,7 +1237,14 @@ class ContainerEncryptionScope(object): set on the container. Default value is false. """ - def __init__(self, default_encryption_scope, **kwargs): + default_encryption_scope: str + """Specifies the default encryption scope to set on the container and use for + all future writes.""" + prevent_encryption_scope_override: bool + """If true, prevents any request from specifying a different encryption scope than the scope + set on the container.""" + + def __init__(self, default_encryption_scope: str, **kwargs: Any) -> None: self.default_encryption_scope = default_encryption_scope self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', False) @@ -999,17 +1259,17 @@ def _from_generated(cls, generated): return None -class DelimitedJsonDialect(object): +class DelimitedJsonDialect(DictMixin): """Defines the input or output JSON serialization for a blob data query. - :keyword str delimiter: The line separator character, default value is '\n' + :keyword str delimiter: The line separator character, default value is '\\\\n'. """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.delimiter = kwargs.pop('delimiter', '\n') -class DelimitedTextDialect(object): +class DelimitedTextDialect(DictMixin): """Defines the input or output delimited (CSV) serialization for a blob query request. :keyword str delimiter: @@ -1017,7 +1277,7 @@ class DelimitedTextDialect(object): :keyword str quotechar: Field quote, defaults to '"'. :keyword str lineterminator: - Record separator, defaults to '\n'. + Record separator, defaults to '\\\\n'. :keyword str escapechar: Escape char, defaults to empty. :keyword bool has_header: @@ -1025,7 +1285,8 @@ class DelimitedTextDialect(object): data will be returned inclusive of the first line. If set to True, the data will be returned exclusive of the first line. """ - def __init__(self, **kwargs): + + def __init__(self, **kwargs: Any) -> None: self.delimiter = kwargs.pop('delimiter', ',') self.quotechar = kwargs.pop('quotechar', '"') self.lineterminator = kwargs.pop('lineterminator', '\n') @@ -1043,11 +1304,12 @@ class ArrowDialect(ArrowField): :keyword int precision: The precision of the field. :keyword int scale: The scale of the field. """ - def __init__(self, type, **kwargs): # pylint: disable=redefined-builtin + + def __init__(self, type, **kwargs: Any) -> None: # pylint: disable=redefined-builtin super(ArrowDialect, self).__init__(type=type, **kwargs) -class ArrowType(str, Enum): +class ArrowType(str, Enum, metaclass=CaseInsensitiveEnumMeta): INT64 = "int64" BOOL = "bool" @@ -1057,50 +1319,188 @@ class ArrowType(str, Enum): DECIMAL = 'decimal' -class ObjectReplicationPolicy(DictMixin): - """Policy id and rule ids applied to a blob. +class ObjectReplicationRule(DictMixin): + """Policy id and rule ids applied to a blob.""" - :ivar str policy_id: - Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair. 
- :ivar list(~azure.storage.blob.ObjectReplicationRule) rules: - Within each policy there may be multiple replication rules. - e.g. rule 1= src/container/.pdf to dst/container2/; rule2 = src/container1/.jpg to dst/container3 - """ + rule_id: str + """Rule id.""" + status: str + """The status of the rule. It could be "Complete" or "Failed" """ - def __init__(self, **kwargs): - self.policy_id = kwargs.pop('policy_id', None) - self.rules = kwargs.pop('rules', None) + def __init__(self, **kwargs: Any) -> None: + self.rule_id = kwargs.pop('rule_id', None) # type: ignore [assignment] + self.status = kwargs.pop('status', None) # type: ignore [assignment] -class ObjectReplicationRule(DictMixin): - """Policy id and rule ids applied to a blob. +class ObjectReplicationPolicy(DictMixin): + """Policy id and rule ids applied to a blob.""" - :ivar str rule_id: - Rule id. - :ivar str status: - The status of the rule. It could be "Complete" or "Failed" - """ + policy_id: str + """Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair.""" + rules: List[ObjectReplicationRule] + """Within each policy there may be multiple replication rules. + e.g. rule 1= src/container/.pdf to dst/container2/; rule2 = src/container1/.jpg to dst/container3""" - def __init__(self, **kwargs): - self.rule_id = kwargs.pop('rule_id', None) - self.status = kwargs.pop('status', None) + def __init__(self, **kwargs: Any) -> None: + self.policy_id = kwargs.pop('policy_id', None) # type: ignore [assignment] + self.rules = kwargs.pop('rules', []) + + +class BlobProperties(DictMixin): + """Blob Properties.""" + + name: str + """The name of the blob.""" + container: str + """The container in which the blob resides.""" + snapshot: Optional[str] + """Datetime value that uniquely identifies the blob snapshot.""" + blob_type: "BlobType" + """String indicating this blob's type.""" + metadata: Dict[str, str] + """Name-value pairs associated with the blob as metadata.""" + last_modified: "datetime" + """A datetime object representing the last time the blob was modified.""" + etag: str + """The ETag contains a value that you can use to perform operations + conditionally.""" + size: int + """The size of the content returned. If the entire blob was requested, + the length of blob in bytes. If a subset of the blob was requested, the + length of the returned subset.""" + content_range: Optional[str] + """Indicates the range of bytes returned in the event that the client + requested a subset of the blob.""" + append_blob_committed_block_count: Optional[int] + """(For Append Blobs) Number of committed blocks in the blob.""" + is_append_blob_sealed: Optional[bool] + """Indicate if the append blob is sealed or not.""" + page_blob_sequence_number: Optional[int] + """(For Page Blobs) Sequence number for page blob used for coordinating + concurrent writes.""" + server_encrypted: bool + """Set to true if the blob is encrypted on the server.""" + copy: "CopyProperties" + """Stores all the copy properties for the blob.""" + content_settings: ContentSettings + """Stores all the content settings for the blob.""" + lease: LeaseProperties + """Stores all the lease information for the blob.""" + blob_tier: Optional[StandardBlobTier] + """Indicates the access tier of the blob. The hot tier is optimized + for storing data that is accessed frequently. The cool storage tier + is optimized for storing data that is infrequently accessed and stored + for at least a month. 
The archive tier is optimized for storing + data that is rarely accessed and stored for at least six months + with flexible latency requirements.""" + rehydrate_priority: Optional[str] + """Indicates the priority with which to rehydrate an archived blob""" + blob_tier_change_time: Optional["datetime"] + """Indicates when the access tier was last changed.""" + blob_tier_inferred: Optional[bool] + """Indicates whether the access tier was inferred by the service. + If false, it indicates that the tier was set explicitly.""" + deleted: Optional[bool] + """Whether this blob was deleted.""" + deleted_time: Optional["datetime"] + """A datetime object representing the time at which the blob was deleted.""" + remaining_retention_days: Optional[int] + """The number of days that the blob will be retained before being permanently deleted by the service.""" + creation_time: "datetime" + """Indicates when the blob was created, in UTC.""" + archive_status: Optional[str] + """Archive status of blob.""" + encryption_key_sha256: Optional[str] + """The SHA-256 hash of the provided encryption key.""" + encryption_scope: Optional[str] + """A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised.""" + request_server_encrypted: Optional[bool] + """Whether this blob is encrypted.""" + object_replication_source_properties: Optional[List[ObjectReplicationPolicy]] + """Only present for blobs that have policy ids and rule ids applied to them.""" + object_replication_destination_policy: Optional[str] + """Represents the Object Replication Policy Id that created this blob.""" + last_accessed_on: Optional["datetime"] + """Indicates when the last Read/Write operation was performed on a Blob.""" + tag_count: Optional[int] + """Tags count on this blob.""" + tags: Optional[Dict[str, str]] + """Key value pair of tags on this blob.""" + has_versions_only: Optional[bool] + """A true value indicates the root blob is deleted""" + immutability_policy: ImmutabilityPolicy + """Specifies the immutability policy of a blob, blob snapshot or blob version.""" + has_legal_hold: Optional[bool] + """Specified if a legal hold should be set on the blob. 
+ Currently this parameter of upload_blob() API is for BlockBlob only.""" + + def __init__(self, **kwargs: Any) -> None: + self.name = kwargs.get('name') # type: ignore [assignment] + self.container = None # type: ignore [assignment] + self.snapshot = kwargs.get('x-ms-snapshot') + self.version_id = kwargs.get('x-ms-version-id') + self.is_current_version = kwargs.get('x-ms-is-current-version') + self.blob_type = BlobType(kwargs['x-ms-blob-type']) if ( + kwargs.get('x-ms-blob-type')) else None # type: ignore [assignment] + self.metadata = kwargs.get('metadata') # type: ignore [assignment] + self.encrypted_metadata = kwargs.get('encrypted_metadata') + self.last_modified = kwargs.get('Last-Modified') # type: ignore [assignment] + self.etag = kwargs.get('ETag') # type: ignore [assignment] + self.size = kwargs.get('Content-Length') # type: ignore [assignment] + self.content_range = kwargs.get('Content-Range') + self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count') + self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed') + self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number') + self.server_encrypted = kwargs.get('x-ms-server-encrypted') # type: ignore [assignment] + self.copy = CopyProperties(**kwargs) + self.content_settings = ContentSettings(**kwargs) + self.lease = LeaseProperties(**kwargs) + self.blob_tier = kwargs.get('x-ms-access-tier') + self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority') + self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time') + self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred') + self.deleted = False + self.deleted_time = None + self.remaining_retention_days = None + self.creation_time = kwargs.get('x-ms-creation-time') # type: ignore [assignment] + self.archive_status = kwargs.get('x-ms-archive-status') + self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256') + self.encryption_scope = kwargs.get('x-ms-encryption-scope') + self.request_server_encrypted = kwargs.get('x-ms-server-encrypted') + self.object_replication_source_properties = kwargs.get('object_replication_source_properties') + self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id') + self.last_accessed_on = kwargs.get('x-ms-last-access-time') + self.tag_count = kwargs.get('x-ms-tag-count') + self.tags = None + self.immutability_policy = ImmutabilityPolicy(expiry_time=kwargs.get('x-ms-immutability-policy-until-date'), + policy_mode=kwargs.get('x-ms-immutability-policy-mode')) + self.has_legal_hold = kwargs.get('x-ms-legal-hold') + self.has_versions_only = None class BlobQueryError(object): - """The error happened during quick query operation. + """The error happened during quick query operation.""" - :ivar str error: - The name of the error. - :ivar bool is_fatal: - If true, this error prevents further query processing. More result data may be returned, + error: Optional[str] + """The name of the error.""" + is_fatal: bool + """If true, this error prevents further query processing. More result data may be returned, but there is no guarantee that all of the original data will be processed. - If false, this error does not prevent further query processing. - :ivar str description: - A description of the error. - :ivar int position: - The blob offset at which the error occurred. 
- """ - def __init__(self, error=None, is_fatal=False, description=None, position=None): + If false, this error does not prevent further query processing.""" + description: Optional[str] + """A description of the error.""" + position: Optional[int] + """The blob offset at which the error occurred.""" + + def __init__( + self, error: Optional[str] = None, + is_fatal: bool = False, + description: Optional[str] = None, + position: Optional[int] = None + ) -> None: self.error = error self.is_fatal = is_fatal self.description = description diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_quick_query_helper.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_quick_query_helper.py index eb51d987b9ac..95f8a4427bba 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_quick_query_helper.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_quick_query_helper.py @@ -5,37 +5,39 @@ # -------------------------------------------------------------------------- from io import BytesIO -from typing import Union, Iterable, IO # pylint: disable=unused-import +from typing import Any, Dict, Generator, IO, Iterable, Optional, Type, Union, TYPE_CHECKING -from ._shared.avro.datafile import DataFileReader from ._shared.avro.avro_io import DatumReader +from ._shared.avro.datafile import DataFileReader + +if TYPE_CHECKING: + from ._models import BlobQueryError class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes - """A streaming object to read query results. - - :ivar str name: - The name of the blob being quered. - :ivar str container: - The name of the container where the blob is. - :ivar dict response_headers: - The response_headers of the quick query request. - :ivar bytes record_delimiter: - The delimiter used to separate lines, or records with the data. The `records` - method will return these lines via a generator. - """ + """A streaming object to read query results.""" + + name: str + """The name of the blob being quered.""" + container: str + """The name of the container where the blob is.""" + response_headers: Dict[str, Any] + """The response_headers of the quick query request.""" + record_delimiter: str + """The delimiter used to separate lines, or records with the data. 
The `records` + method will return these lines via a generator.""" def __init__( self, - name=None, - container=None, - errors=None, - record_delimiter='\n', - encoding=None, - headers=None, - response=None, - error_cls=None, - ): + name: str = None, # type: ignore [assignment] + container: str = None, # type: ignore [assignment] + errors: Any = None, + record_delimiter: str = '\n', + encoding: Optional[str] = None, + headers: Dict[str, Any] = None, # type: ignore [assignment] + response: Any = None, + error_cls: Type["BlobQueryError"] = None, # type: ignore [assignment] + ) -> None: self.name = name self.container = container self.response_headers = headers @@ -51,7 +53,7 @@ def __init__( def __len__(self): return self._size - def _process_record(self, result): + def _process_record(self, result: Dict[str, Any]) -> Optional[bytes]: self._size = result.get('totalBytes', self._size) self._bytes_processed = result.get('bytesScanned', self._bytes_processed) if 'data' in result: @@ -67,7 +69,7 @@ def _process_record(self, result): self._errors(error) return None - def _iter_stream(self): + def _iter_stream(self) -> Generator[bytes, None, None]: if self._first_result is not None: yield self._first_result for next_result in self._parsed_results: @@ -75,14 +77,14 @@ def _iter_stream(self): if processed_result is not None: yield processed_result - def readall(self): - # type: () -> Union[bytes, str] + def readall(self) -> Union[bytes, str]: """Return all query results. This operation is blocking until all data is downloaded. If encoding has been configured - this will be used to decode individual records are they are received. + :returns: The query results. :rtype: Union[bytes, str] """ stream = BytesIO() @@ -92,11 +94,10 @@ def readall(self): return data.decode(self._encoding) return data - def readinto(self, stream): - # type: (IO) -> None + def readinto(self, stream: IO) -> None: """Download the query result to a stream. - :param stream: + :param IO stream: The stream to download to. This can be an open file-handle, or any writable stream. :returns: None @@ -104,14 +105,14 @@ def readinto(self, stream): for record in self._iter_stream(): stream.write(record) - def records(self): - # type: () -> Iterable[Union[bytes, str]] + def records(self) -> Iterable[Union[bytes, str]]: """Returns a record generator for the query result. Records will be returned line by line. If encoding has been configured - this will be used to decode individual records are they are received. + :returns: A record generator for the query result. :rtype: Iterable[Union[bytes, str]] """ delimiter = self.record_delimiter.encode('utf-8') @@ -123,7 +124,6 @@ def records(self): yield record - class QuickQueryStreamer(object): """ File-like streaming iterator. @@ -153,8 +153,6 @@ def __next__(self): self._download_offset += len(next_part) return next_part - next = __next__ # Python 2 compatibility. - def tell(self): return self._point @@ -165,7 +163,7 @@ def seek(self, offset, whence=0): self._point += offset else: raise ValueError("whence must be 0, or 1") - if self._point < 0: + if self._point < 0: # pylint: disable=consider-using-max-builtin self._point = 0 # XXX is this right? 
def read(self, size): diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_serialize.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_serialize.py index fbfed9c9974c..e9d5eb190959 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_serialize.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_serialize.py @@ -3,7 +3,8 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use +from typing import Any, cast, Dict, Optional, Tuple, Union, TYPE_CHECKING + try: from urllib.parse import quote except ImportError: @@ -11,23 +12,25 @@ from azure.core import MatchConditions -from ._models import ( - ContainerEncryptionScope, - DelimitedJsonDialect) from ._generated.models import ( - ModifiedAccessConditions, - SourceModifiedAccessConditions, - CpkScopeInfo, + ArrowConfiguration, + BlobTag, + BlobTags, ContainerCpkScopeInfo, - QueryFormat, - QuerySerialization, + CpkScopeInfo, DelimitedTextConfiguration, JsonTextConfiguration, - ArrowConfiguration, + LeaseAccessConditions, + ModifiedAccessConditions, + QueryFormat, QueryFormatType, - BlobTag, - BlobTags, LeaseAccessConditions + QuerySerialization, + SourceModifiedAccessConditions ) +from ._models import ContainerEncryptionScope, DelimitedJsonDialect + +if TYPE_CHECKING: + from ._lease import BlobLeaseClient _SUPPORTED_API_VERSIONS = [ @@ -36,37 +39,57 @@ '2019-10-10', '2019-12-12', '2020-02-10', - '2020-04-08' + '2020-04-08', + '2020-06-12', + '2020-08-04', + '2020-10-02', + '2020-12-06', + '2021-02-12', + '2021-04-10', + '2021-06-08', + '2021-08-06', + '2021-12-02', + '2022-11-02', + '2023-01-03', + '2023-05-03', + '2023-08-03', + '2023-11-03', + '2024-05-04', + '2024-08-04', + '2024-11-04', + '2025-01-05', ] -def _get_match_headers(kwargs, match_param, etag_param): - # type: (str) -> Tuple(Dict[str, Any], Optional[str], Optional[str]) +def _get_match_headers( + kwargs: Dict[str, Any], + match_param: str, + etag_param: str +) -> Tuple[Optional[str], Optional[Any]]: if_match = None if_none_match = None match_condition = kwargs.pop(match_param, None) if match_condition == MatchConditions.IfNotModified: if_match = kwargs.pop(etag_param, None) if not if_match: - raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + raise ValueError(f"'{match_param}' specified without '{etag_param}'.") elif match_condition == MatchConditions.IfPresent: if_match = '*' elif match_condition == MatchConditions.IfModified: if_none_match = kwargs.pop(etag_param, None) if not if_none_match: - raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + raise ValueError(f"'{match_param}' specified without '{etag_param}'.") elif match_condition == MatchConditions.IfMissing: if_none_match = '*' elif match_condition is None: if kwargs.get(etag_param): - raise ValueError("'{}' specified without '{}'.".format(etag_param, match_param)) + raise ValueError(f"'{etag_param}' specified without '{match_param}'.") else: - raise TypeError("Invalid match condition: {}".format(match_condition)) + raise TypeError(f"Invalid match condition: {match_condition}") return if_match, if_none_match -def 
get_access_conditions(lease): - # type: (Optional[Union[BlobLeaseClient, str]]) -> Union[LeaseAccessConditions, None] +def get_access_conditions(lease: Optional[Union["BlobLeaseClient", str]]) -> Optional[LeaseAccessConditions]: try: lease_id = lease.id # type: ignore except AttributeError: @@ -74,8 +97,7 @@ def get_access_conditions(lease): return LeaseAccessConditions(lease_id=lease_id) if lease_id else None -def get_modify_conditions(kwargs): - # type: (Dict[str, Any]) -> ModifiedAccessConditions +def get_modify_conditions(kwargs: Dict[str, Any]) -> ModifiedAccessConditions: if_match, if_none_match = _get_match_headers(kwargs, 'match_condition', 'etag') return ModifiedAccessConditions( if_modified_since=kwargs.pop('if_modified_since', None), @@ -86,8 +108,7 @@ def get_modify_conditions(kwargs): ) -def get_source_conditions(kwargs): - # type: (Dict[str, Any]) -> SourceModifiedAccessConditions +def get_source_conditions(kwargs: Dict[str, Any]) -> SourceModifiedAccessConditions: if_match, if_none_match = _get_match_headers(kwargs, 'source_match_condition', 'source_etag') return SourceModifiedAccessConditions( source_if_modified_since=kwargs.pop('source_if_modified_since', None), @@ -98,15 +119,13 @@ def get_source_conditions(kwargs): ) -def get_cpk_scope_info(kwargs): - # type: (Dict[str, Any]) -> CpkScopeInfo +def get_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[CpkScopeInfo]: if 'encryption_scope' in kwargs: return CpkScopeInfo(encryption_scope=kwargs.pop('encryption_scope')) return None -def get_container_cpk_scope_info(kwargs): - # type: (Dict[str, Any]) -> ContainerCpkScopeInfo +def get_container_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[ContainerCpkScopeInfo]: encryption_scope = kwargs.pop('container_encryption_scope', None) if encryption_scope: if isinstance(encryption_scope, ContainerEncryptionScope): @@ -123,17 +142,19 @@ def get_container_cpk_scope_info(kwargs): return None -def get_api_version(kwargs, default): - # type: (Dict[str, Any]) -> str - api_version = kwargs.pop('api_version', None) +def get_api_version(kwargs: Dict[str, Any]) -> str: + api_version = kwargs.get('api_version', None) if api_version and api_version not in _SUPPORTED_API_VERSIONS: versions = '\n'.join(_SUPPORTED_API_VERSIONS) - raise ValueError("Unsupported API version '{}'. Please select from:\n{}".format(api_version, versions)) - return api_version or default + raise ValueError(f"Unsupported API version '{api_version}'. 
Please select from:\n{versions}") + return api_version or _SUPPORTED_API_VERSIONS[-1] +def get_version_id(self_vid: Optional[str], kwargs: Dict[str, Any]) -> Optional[str]: + if 'version_id' in kwargs: + return cast(str, kwargs.pop('version_id')) + return self_vid -def serialize_blob_tags_header(tags=None): - # type: (Optional[Dict[str, str]]) -> str +def serialize_blob_tags_header(tags: Optional[Dict[str, str]] = None) -> Optional[str]: if tags is None: return None @@ -151,28 +172,27 @@ def serialize_blob_tags_header(tags=None): return ''.join(components) -def serialize_blob_tags(tags=None): - # type: (Optional[Dict[str, str]]) -> Union[BlobTags, None] +def serialize_blob_tags(tags: Optional[Dict[str, str]] = None) -> BlobTags: tag_list = [] if tags: tag_list = [BlobTag(key=k, value=v) for k, v in tags.items()] return BlobTags(blob_tag_set=tag_list) -def serialize_query_format(formater): - if isinstance(formater, DelimitedJsonDialect): - serialization_settings = JsonTextConfiguration( - record_separator=formater.delimiter - ) - qq_format = QueryFormat( - type=QueryFormatType.json, - json_text_configuration=serialization_settings) +def serialize_query_format(formater: Union[str, DelimitedJsonDialect]) -> Optional[QuerySerialization]: + if formater == "ParquetDialect": + qq_format = QueryFormat(type=QueryFormatType.PARQUET, parquet_text_configuration=' ') #type: ignore [arg-type] + elif isinstance(formater, DelimitedJsonDialect): + json_serialization_settings = JsonTextConfiguration(record_separator=formater.delimiter) + qq_format = QueryFormat(type=QueryFormatType.JSON, json_text_configuration=json_serialization_settings) elif hasattr(formater, 'quotechar'): # This supports a csv.Dialect as well try: - headers = formater.has_header + headers = formater.has_header # type: ignore except AttributeError: headers = False - serialization_settings = DelimitedTextConfiguration( + if isinstance(formater, str): + raise ValueError("Unknown string value provided. 
Accepted values: ParquetDialect") + csv_serialization_settings = DelimitedTextConfiguration( column_separator=formater.delimiter, field_quote=formater.quotechar, record_separator=formater.lineterminator, @@ -180,18 +200,14 @@ def serialize_query_format(formater): headers_present=headers ) qq_format = QueryFormat( - type=QueryFormatType.delimited, - delimited_text_configuration=serialization_settings + type=QueryFormatType.DELIMITED, + delimited_text_configuration=csv_serialization_settings ) elif isinstance(formater, list): - serialization_settings = ArrowConfiguration( - schema=formater - ) - qq_format = QueryFormat( - type=QueryFormatType.arrow, - arrow_configuration=serialization_settings) + arrow_serialization_settings = ArrowConfiguration(schema=formater) + qq_format = QueryFormat(type=QueryFormatType.arrow, arrow_configuration=arrow_serialization_settings) elif not formater: return None else: - raise TypeError("Format must be DelimitedTextDialect or DelimitedJsonDialect.") + raise TypeError("Format must be DelimitedTextDialect or DelimitedJsonDialect or ParquetDialect.") return QuerySerialization(format=qq_format) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/__init__.py index 160f88223820..a8b1a27d48f9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/__init__.py @@ -13,8 +13,6 @@ except ImportError: from urllib2 import quote, unquote # type: ignore -import six - def url_quote(url): return quote(url) @@ -25,14 +23,14 @@ def url_unquote(url): def encode_base64(data): - if isinstance(data, six.text_type): + if isinstance(data, str): data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') def decode_base64_to_bytes(data): - if isinstance(data, six.text_type): + if isinstance(data, str): data = data.encode('utf-8') return base64.b64decode(data) @@ -46,9 +44,9 @@ def sign_string(key, string_to_sign, key_is_base64=True): if key_is_base64: key = decode_base64_to_bytes(key) else: - if isinstance(key, six.text_type): + if isinstance(key, str): key = key.encode('utf-8') - if isinstance(string_to_sign, six.text_type): + if isinstance(string_to_sign, str): string_to_sign = string_to_sign.encode('utf-8') signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256) digest = signed_hmac_sha256.digest() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/authentication.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/authentication.py index d04c1e4fb539..e4d5ed730846 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/authentication.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/authentication.py @@ -5,13 +5,10 @@ # -------------------------------------------------------------------------- import logging -import sys - -try: - from urllib.parse import urlparse, unquote -except 
ImportError: - from urlparse import urlparse # type: ignore - from urllib2 import unquote # type: ignore +import re +from typing import List, Tuple +from urllib.parse import unquote, urlparse +from functools import cmp_to_key try: from yarl import URL @@ -19,7 +16,7 @@ pass try: - from azure.core.pipeline.transport import AioHttpTransport + from azure.core.pipeline.transport import AioHttpTransport # pylint: disable=non-abstract-transport-import except ImportError: AioHttpTransport = None @@ -28,24 +25,96 @@ from . import sign_string - logger = logging.getLogger(__name__) +table_lv0 = [ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721, 0x723, 0x725, + 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e, + 0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, + 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, + 0x0, 0x0, 0x0, 0x743, 0x744, 0x748, 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, + 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, + 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c, 0x0, 0x750, 0x0, +] + +table_lv4 = [ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +] + +def compare(lhs: str, rhs: str) -> int: # pylint:disable=too-many-return-statements + tables = [table_lv0, table_lv4] + curr_level, i, j, n = 0, 0, 0, len(tables) + lhs_len = len(lhs) + rhs_len = len(rhs) + while curr_level < n: + if curr_level == (n - 1) and i != j: + if i > j: + return -1 + if i < j: + return 1 + return 0 + + w1 = tables[curr_level][ord(lhs[i])] if i < lhs_len else 0x1 + w2 = tables[curr_level][ord(rhs[j])] if j < rhs_len else 0x1 + + if w1 == 0x1 and w2 == 0x1: + i = 0 + j = 0 + curr_level += 1 + elif w1 == w2: + i += 1 + j += 1 + elif w1 == 0: + i += 1 + elif w2 == 0: + j += 1 + else: + if w1 < w2: + return -1 + if w1 > w2: + return 1 + return 0 + return 0 + # wraps a given exception with the desired exception type def _wrap_exception(ex, desired_type): msg = "" if ex.args: msg = ex.args[0] - if sys.version_info >= (3,): - # Automatic chaining in Python 3 means we keep the trace - return desired_type(msg) - # There isn't a good solution in 2 for keeping the stack trace - # in general, or that will not result in an error in 3 - # However, we can keep the previous error type and message - # TODO: In the future we will log the trace - return desired_type('{}: {}'.format(ex.__class__.__name__, msg)) + return desired_type(msg) + +# This method attempts to emulate the sorting done by the service +def _storage_header_sort(input_headers: List[Tuple[str, str]]) -> List[Tuple[str, str]]: + + # Build dict of tuples and list of 
keys + header_dict = {} + header_keys = [] + for k, v in input_headers: + header_dict[k] = v + header_keys.append(k) + + try: + header_keys = sorted(header_keys, key=cmp_to_key(compare)) + except ValueError as exc: + raise ValueError("Illegal character encountered when sorting headers.") from exc + + # Build list of sorted tuples + sorted_headers = [] + for key in header_keys: + sorted_headers.append((key, header_dict.pop(key))) + return sorted_headers class AzureSigningError(ClientAuthenticationError): @@ -56,7 +125,6 @@ class AzureSigningError(ClientAuthenticationError): """ -# pylint: disable=no-self-use class SharedKeyCredentialPolicy(SansIOHTTPPolicy): def __init__(self, account_name, account_key): @@ -95,7 +163,7 @@ def _get_canonicalized_headers(request): for name, value in request.http_request.headers.items(): if name.startswith('x-ms-'): x_ms_headers.append((name.lower(), value)) - x_ms_headers.sort() + x_ms_headers = _storage_header_sort(x_ms_headers) for name, value in x_ms_headers: if value is not None: string_to_sign += ''.join([name, ':', value, '\n']) @@ -121,7 +189,7 @@ def _add_authorization_header(self, request, string_to_sign): except Exception as ex: # Wrap any error that occurred as signing error # Doing so will clarify/locate the source of problem - raise _wrap_exception(ex, AzureSigningError) + raise _wrap_exception(ex, AzureSigningError) from ex def on_request(self, request): string_to_sign = \ @@ -139,4 +207,39 @@ def on_request(self, request): self._get_canonicalized_resource_query(request) self._add_authorization_header(request, string_to_sign) - #logger.debug("String_to_sign=%s", string_to_sign) + # logger.debug("String_to_sign=%s", string_to_sign) + + +class StorageHttpChallenge(object): + def __init__(self, challenge): + """ Parses an HTTP WWW-Authentication Bearer challenge from the Storage service. """ + if not challenge: + raise ValueError("Challenge cannot be empty") + + self._parameters = {} + self.scheme, trimmed_challenge = challenge.strip().split(" ", 1) + + # name=value pairs either comma or space separated with values possibly being + # enclosed in quotes + for item in re.split('[, ]', trimmed_challenge): + comps = item.split("=") + if len(comps) == 2: + key = comps[0].strip(' "') + value = comps[1].strip(' "') + if key: + self._parameters[key] = value + + # Extract and verify required parameters + self.authorization_uri = self._parameters.get('authorization_uri') + if not self.authorization_uri: + raise ValueError("Authorization Uri not found") + + self.resource_id = self._parameters.get('resource_id') + if not self.resource_id: + raise ValueError("Resource id not found") + + uri_path = urlparse(self.authorization_uri).path.lstrip("/") + self.tenant_id = uri_path.split("/")[0] + + def get_value(self, key): + return self._parameters.get(key) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io.py index 93a5c134849a..3e46f1fb53fe 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. 
See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Input/output utilities. @@ -52,7 +53,7 @@ class SchemaResolutionException(schema.AvroException): def __init__(self, fail_msg, writer_schema=None): pretty_writers = json.dumps(json.loads(str(writer_schema)), indent=2) if writer_schema: - fail_msg += "\nWriter's Schema: %s" % pretty_writers + fail_msg += f"\nWriter's Schema: {pretty_writers}" schema.AvroException.__init__(self, fail_msg) # ------------------------------------------------------------------------------ @@ -76,10 +77,9 @@ def reader(self): def read(self, n): """Read n bytes. - Args: - n: Number of bytes to read. - Returns: - The next n bytes from the input. + :param int n: Number of bytes to read. + :returns: The next n bytes from the input. + :rtype: bytes """ assert (n >= 0), n input_bytes = self.reader.read(n) @@ -105,7 +105,7 @@ def read_boolean(self): return True if b == 0: return False - fail_msg = "Invalid value for boolean: %s" % b + fail_msg = f"Invalid value for boolean: {b}" raise schema.AvroException(fail_msg) def read_int(self): @@ -253,7 +253,7 @@ def read_data(self, writer_schema, decoder): elif writer_schema.type in ['record', 'error', 'request']: result = self.read_record(writer_schema, decoder) else: - fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type + fail_msg = f"Cannot read unknown schema type: {writer_schema.type}" raise schema.AvroException(fail_msg) return result @@ -290,33 +290,26 @@ def skip_data(self, writer_schema, decoder): self.skip_record(writer_schema, decoder) result = None else: - fail_msg = "Unknown schema type: %s" % writer_schema.type + fail_msg = f"Unknown schema type: {writer_schema.type}" raise schema.AvroException(fail_msg) return result + # Fixed instances are encoded using the number of bytes declared in the schema. @staticmethod def read_fixed(writer_schema, decoder): - """ - Fixed instances are encoded using the number of bytes declared - in the schema. - """ return decoder.read(writer_schema.size) @staticmethod def skip_fixed(writer_schema, decoder): return decoder.skip(writer_schema.size) + # An enum is encoded by an int, representing the zero-based position of the symbol in the schema. @staticmethod def read_enum(writer_schema, decoder): - """ - An enum is encoded by a int, representing the zero-based position - of the symbol in the schema. - """ # read data index_of_symbol = decoder.read_int() if index_of_symbol >= len(writer_schema.symbols): - fail_msg = "Can't access enum index %d for enum with %d symbols" \ - % (index_of_symbol, len(writer_schema.symbols)) + fail_msg = f"Can't access enum index {index_of_symbol} for enum with {len(writer_schema.symbols)} symbols" raise SchemaResolutionException(fail_msg, writer_schema) read_symbol = writer_schema.symbols[index_of_symbol] return read_symbol @@ -325,21 +318,15 @@ def read_enum(writer_schema, decoder): def skip_enum(decoder): return decoder.skip_int() + # Arrays are encoded as a series of blocks. + + # Each block consists of a long count value, followed by that many array items. + # A block with count zero indicates the end of the array. Each item is encoded per the array's item schema. + + # If a block's count is negative, then the count is followed immediately by a long block size, + # indicating the number of bytes in the block.
+ # The actual count in this case is the absolute value of the count written. def read_array(self, writer_schema, decoder): - """ - Arrays are encoded as a series of blocks. - - Each block consists of a long count value, - followed by that many array items. - A block with count zero indicates the end of the array. - Each item is encoded per the array's item schema. - - If a block's count is negative, - then the count is followed immediately by a long block size, - indicating the number of bytes in the block. - The actual count in this case - is the absolute value of the count written. - """ read_items = [] block_count = decoder.read_long() while block_count != 0: @@ -362,21 +349,15 @@ def skip_array(self, writer_schema, decoder): self.skip_data(writer_schema.items, decoder) block_count = decoder.read_long() + # Maps are encoded as a series of blocks. + + # Each block consists of a long count value, followed by that many key/value pairs. + # A block with count zero indicates the end of the map. Each item is encoded per the map's value schema. + + # If a block's count is negative, then the count is followed immediately by a long block size, + # indicating the number of bytes in the block. + # The actual count in this case is the absolute value of the count written. def read_map(self, writer_schema, decoder): - """ - Maps are encoded as a series of blocks. - - Each block consists of a long count value, - followed by that many key/value pairs. - A block with count zero indicates the end of the map. - Each item is encoded per the map's value schema. - - If a block's count is negative, - then the count is followed immediately by a long block size, - indicating the number of bytes in the block. - The actual count in this case - is the absolute value of the count written. - """ read_items = {} block_count = decoder.read_long() while block_count != 0: @@ -401,17 +382,15 @@ def skip_map(self, writer_schema, decoder): self.skip_data(writer_schema.values, decoder) block_count = decoder.read_long() + # A union is encoded by first writing a long value indicating + # the zero-based position within the union of the schema of its value. + # The value is then encoded per the indicated schema within the union. def read_union(self, writer_schema, decoder): - """ - A union is encoded by first writing a long value indicating - the zero-based position within the union of the schema of its value. - The value is then encoded per the indicated schema within the union. 
- """ # schema resolution index_of_schema = int(decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) selected_writer_schema = writer_schema.schemas[index_of_schema] @@ -421,31 +400,29 @@ def read_union(self, writer_schema, decoder): def skip_union(self, writer_schema, decoder): index_of_schema = int(decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) return self.skip_data(writer_schema.schemas[index_of_schema], decoder) + # A record is encoded by encoding the values of its fields + # in the order that they are declared. In other words, a record + # is encoded as just the concatenation of the encodings of its fields. + # Field values are encoded per their schema. + + # Schema Resolution: + # * the ordering of fields may be different: fields are matched by name. + # * schemas for fields with the same name in both records are resolved + # recursively. + # * if the writer's record contains a field with a name not present in the + # reader's record, the writer's value for that field is ignored. + # * if the reader's record schema has a field that contains a default value, + # and writer's schema does not have a field with the same name, then the + # reader should use the default value from its field. + # * if the reader's record schema has a field with no default value, and + # writer's schema does not have a field with the same name, then the + # field's value is unset. def read_record(self, writer_schema, decoder): - """ - A record is encoded by encoding the values of its fields - in the order that they are declared. In other words, a record - is encoded as just the concatenation of the encodings of its fields. - Field values are encoded per their schema. - - Schema Resolution: - * the ordering of fields may be different: fields are matched by name. - * schemas for fields with the same name in both records are resolved - recursively. - * if the writer's record contains a field with a name not present in the - reader's record, the writer's value for that field is ignored. - * if the reader's record schema has a field that contains a default value, - and writer's schema does not have a field with the same name, then the - reader should use the default value from its field. - * if the reader's record schema has a field with no default value, and - writer's schema does not have a field with the same name, then the - field's value is unset. 
- """ # schema resolution read_record = {} for field in writer_schema.fields: @@ -456,9 +433,3 @@ def read_record(self, writer_schema, decoder): def skip_record(self, writer_schema, decoder): for field in writer_schema.fields: self.skip_data(field.type, decoder) - - -# ------------------------------------------------------------------------------ - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io_async.py index e9812163795f..8688661b5add 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/avro_io_async.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Input/output utilities. @@ -59,10 +60,9 @@ def reader(self): async def read(self, n): """Read n bytes. - Args: - n: Number of bytes to read. - Returns: - The next n bytes from the input. + :param int n: Number of bytes to read. + :returns: The next n bytes from the input. + :rtype: bytes """ assert (n >= 0), n input_bytes = await self.reader.read(n) @@ -88,7 +88,7 @@ async def read_boolean(self): return True if b == 0: return False - fail_msg = "Invalid value for boolean: %s" % b + fail_msg = f"Invalid value for boolean: {b}" raise schema.AvroException(fail_msg) async def read_int(self): @@ -237,7 +237,7 @@ async def read_data(self, writer_schema, decoder): elif writer_schema.type in ['record', 'error', 'request']: result = await self.read_record(writer_schema, decoder) else: - fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type + fail_msg = f"Cannot read unknown schema type: {writer_schema.type}" raise schema.AvroException(fail_msg) return result @@ -274,33 +274,26 @@ async def skip_data(self, writer_schema, decoder): await self.skip_record(writer_schema, decoder) result = None else: - fail_msg = "Unknown schema type: %s" % writer_schema.type + fail_msg = f"Unknown schema type: {writer_schema.type}" raise schema.AvroException(fail_msg) return result + # Fixed instances are encoded using the number of bytes declared in the schema. @staticmethod async def read_fixed(writer_schema, decoder): - """ - Fixed instances are encoded using the number of bytes declared - in the schema. - """ return await decoder.read(writer_schema.size) @staticmethod async def skip_fixed(writer_schema, decoder): return await decoder.skip(writer_schema.size) + # An enum is encoded by a int, representing the zero-based position of the symbol in the schema. @staticmethod async def read_enum(writer_schema, decoder): - """ - An enum is encoded by a int, representing the zero-based position - of the symbol in the schema. 
- """ # read data index_of_symbol = await decoder.read_int() if index_of_symbol >= len(writer_schema.symbols): - fail_msg = "Can't access enum index %d for enum with %d symbols" \ - % (index_of_symbol, len(writer_schema.symbols)) + fail_msg = f"Can't access enum index {index_of_symbol} for enum with {len(writer_schema.symbols)} symbols" raise SchemaResolutionException(fail_msg, writer_schema) read_symbol = writer_schema.symbols[index_of_symbol] return read_symbol @@ -309,21 +302,15 @@ async def read_enum(writer_schema, decoder): async def skip_enum(decoder): return await decoder.skip_int() + # Arrays are encoded as a series of blocks. + + # Each block consists of a long count value, followed by that many array items. + # A block with count zero indicates the end of the array. Each item is encoded per the array's item schema. + + # If a block's count is negative, then the count is followed immediately by a long block size, + # indicating the number of bytes in the block. + # The actual count in this case is the absolute value of the count written. async def read_array(self, writer_schema, decoder): - """ - Arrays are encoded as a series of blocks. - - Each block consists of a long count value, - followed by that many array items. - A block with count zero indicates the end of the array. - Each item is encoded per the array's item schema. - - If a block's count is negative, - then the count is followed immediately by a long block size, - indicating the number of bytes in the block. - The actual count in this case - is the absolute value of the count written. - """ read_items = [] block_count = await decoder.read_long() while block_count != 0: @@ -346,21 +333,15 @@ async def skip_array(self, writer_schema, decoder): await self.skip_data(writer_schema.items, decoder) block_count = await decoder.read_long() + # Maps are encoded as a series of blocks. + + # Each block consists of a long count value, followed by that many key/value pairs. + # A block with count zero indicates the end of the map. Each item is encoded per the map's value schema. + + # If a block's count is negative, then the count is followed immediately by a long block size, + # indicating the number of bytes in the block. + # The actual count in this case is the absolute value of the count written. async def read_map(self, writer_schema, decoder): - """ - Maps are encoded as a series of blocks. - - Each block consists of a long count value, - followed by that many key/value pairs. - A block with count zero indicates the end of the map. - Each item is encoded per the map's value schema. - - If a block's count is negative, - then the count is followed immediately by a long block size, - indicating the number of bytes in the block. - The actual count in this case - is the absolute value of the count written. - """ read_items = {} block_count = await decoder.read_long() while block_count != 0: @@ -385,17 +366,15 @@ async def skip_map(self, writer_schema, decoder): await self.skip_data(writer_schema.values, decoder) block_count = await decoder.read_long() + # A union is encoded by first writing a long value indicating + # the zero-based position within the union of the schema of its value. + # The value is then encoded per the indicated schema within the union. async def read_union(self, writer_schema, decoder): - """ - A union is encoded by first writing a long value indicating - the zero-based position within the union of the schema of its value. - The value is then encoded per the indicated schema within the union. 
- """ # schema resolution index_of_schema = int(await decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) selected_writer_schema = writer_schema.schemas[index_of_schema] @@ -405,31 +384,29 @@ async def read_union(self, writer_schema, decoder): async def skip_union(self, writer_schema, decoder): index_of_schema = int(await decoder.read_long()) if index_of_schema >= len(writer_schema.schemas): - fail_msg = "Can't access branch index %d for union with %d branches" \ - % (index_of_schema, len(writer_schema.schemas)) + fail_msg = (f"Can't access branch index {index_of_schema} " + f"for union with {len(writer_schema.schemas)} branches") raise SchemaResolutionException(fail_msg, writer_schema) return await self.skip_data(writer_schema.schemas[index_of_schema], decoder) + # A record is encoded by encoding the values of its fields + # in the order that they are declared. In other words, a record + # is encoded as just the concatenation of the encodings of its fields. + # Field values are encoded per their schema. + + # Schema Resolution: + # * the ordering of fields may be different: fields are matched by name. + # * schemas for fields with the same name in both records are resolved + # recursively. + # * if the writer's record contains a field with a name not present in the + # reader's record, the writer's value for that field is ignored. + # * if the reader's record schema has a field that contains a default value, + # and writer's schema does not have a field with the same name, then the + # reader should use the default value from its field. + # * if the reader's record schema has a field with no default value, and + # writer's schema does not have a field with the same name, then the + # field's value is unset. async def read_record(self, writer_schema, decoder): - """ - A record is encoded by encoding the values of its fields - in the order that they are declared. In other words, a record - is encoded as just the concatenation of the encodings of its fields. - Field values are encoded per their schema. - - Schema Resolution: - * the ordering of fields may be different: fields are matched by name. - * schemas for fields with the same name in both records are resolved - recursively. - * if the writer's record contains a field with a name not present in the - reader's record, the writer's value for that field is ignored. - * if the reader's record schema has a field that contains a default value, - and writer's schema does not have a field with the same name, then the - reader should use the default value from its field. - * if the reader's record schema has a field with no default value, and - writer's schema does not have a field with the same name, then the - field's value is unset. 
- """ # schema resolution read_record = {} for field in writer_schema.fields: @@ -440,9 +417,3 @@ async def read_record(self, writer_schema, decoder): async def skip_record(self, writer_schema, decoder): for field in writer_schema.fields: await self.skip_data(field.type, decoder) - - -# ------------------------------------------------------------------------------ - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile.py index df06fe0cfe76..757e0329cd07 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Read/Write Avro File Object Containers.""" @@ -102,7 +103,7 @@ def __init__(self, reader, datum_reader, **kwargs): else: self.codec = avro_codec_raw.decode('utf-8') if self.codec not in VALID_CODECS: - raise DataFileException('Unknown codec: %s.' % self.codec) + raise DataFileException(f"Unknown codec: {self.codec}.") # get ready to read self._block_count = 0 @@ -166,10 +167,9 @@ def block_count(self): def get_meta(self, key): """Reports the value of a given metadata key. - Args: - key: Metadata key (string) to report the value of. - Returns: - Value associated to the metadata key, as bytes. + :param str key: Metadata key to report the value of. + :returns: Value associated to the metadata key, as bytes. + :rtype: bytes """ return self._meta.get(key) @@ -185,8 +185,7 @@ def _read_header(self): # check magic number if header.get('magic') != MAGIC: - fail_msg = "Not an Avro data file: %s doesn't match %s." \ - % (header.get('magic'), MAGIC) + fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}." 
raise schema.AvroException(fail_msg) # set metadata @@ -210,7 +209,7 @@ def _read_block_header(self): uncompressed = zlib.decompress(data, -15) self._datum_decoder = avro_io.BinaryDecoder(io.BytesIO(uncompressed)) else: - raise DataFileException("Unknown codec: %r" % self.codec) + raise DataFileException(f"Unknown codec: {self.codec!r}") def _skip_sync(self): """ @@ -253,14 +252,6 @@ def __next__(self): return datum - # PY2 - def next(self): - return self.__next__() - def close(self): """Close this reader.""" self.reader.close() - - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile_async.py index 1e9d018228de..85dc5cb582b3 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/datafile_async.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-missing-return, docstring-missing-rtype """Read/Write Avro File Object Containers.""" @@ -64,7 +65,7 @@ async def init(self): else: self.codec = avro_codec_raw.decode('utf-8') if self.codec not in VALID_CODECS: - raise DataFileException('Unknown codec: %s.' % self.codec) + raise DataFileException(f"Unknown codec: {self.codec}.") # get ready to read self._block_count = 0 @@ -127,10 +128,9 @@ def block_count(self): def get_meta(self, key): """Reports the value of a given metadata key. - Args: - key: Metadata key (string) to report the value of. - Returns: - Value associated to the metadata key, as bytes. + :param str key: Metadata key to report the value of. + :returns: Value associated to the metadata key, as bytes. + :rtype: bytes """ return self._meta.get(key) @@ -146,8 +146,7 @@ async def _read_header(self): # check magic number if header.get('magic') != MAGIC: - fail_msg = "Not an Avro data file: %s doesn't match %s." \ - % (header.get('magic'), MAGIC) + fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}." 
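# Aside on the 'deflate' branch shown earlier: Avro's deflate codec stores raw
# DEFLATE bytes with no zlib header or checksum, which is exactly what a negative
# wbits value (-15) tells zlib to expect. The payload below is invented.
import zlib

payload = zlib.compress(b"avro block payload", 9)[2:-4]  # strip zlib header/trailer
assert zlib.decompress(payload, -15) == b"avro block payload"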
raise schema.AvroException(fail_msg) # set metadata @@ -163,7 +162,7 @@ async def _read_block_header(self): await self.raw_decoder.skip_long() self._datum_decoder = self._raw_decoder else: - raise DataFileException("Unknown codec: %r" % self.codec) + raise DataFileException(f"Unknown codec: {self.codec!r}") async def _skip_sync(self): """ @@ -209,7 +208,3 @@ async def __anext__(self): def close(self): """Close this reader.""" self.reader.close() - - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/schema.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/schema.py index 18027e3c82c7..d5484abcdd9d 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/schema.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/avro/schema.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=too-many-lines +# pylint: disable=docstring-missing-return, docstring-missing-rtype, too-many-lines """Representation of Avro schemas. @@ -29,8 +29,6 @@ import json import logging import re - - logger = logging.getLogger(__name__) # ------------------------------------------------------------------------------ @@ -143,11 +141,12 @@ def __init__(self, data_type, other_props=None): other_props: Optional dictionary of additional properties. """ if data_type not in VALID_TYPES: - raise SchemaParseException('%r is not a valid Avro type.' % data_type) + raise SchemaParseException(f'{data_type!r} is not a valid Avro type.') # All properties of this schema, as a map: property name -> property value - self._props = {'type': data_type} + self._props = {} + self._props['type'] = data_type self._type = data_type if other_props: @@ -189,15 +188,13 @@ def __str__(self): """Returns: the JSON representation of this schema.""" return json.dumps(self.to_json(names=None)) + # Converts the schema object into its AVRO specification representation. + + # Schema types that have names (records, enums, and fixed) must be aware of not + # re-defining schemas that are already listed in the parameter names. @abc.abstractmethod def to_json(self, names): - """Converts the schema object into its AVRO specification representation. - - Schema types that have names (records, enums, and fixed) must - be aware of not re-defining schemas that are already listed - in the parameter names. - """ - raise Exception('Cannot run abstract method.') + ... # ------------------------------------------------------------------------------ @@ -213,7 +210,7 @@ def to_json(self, names): ) -class Name: +class Name(object): """Representation of an Avro name.""" def __init__(self, name, namespace=None): @@ -234,7 +231,7 @@ def __init__(self, name, namespace=None): match = _RE_FULL_NAME.match(self._fullname) if match is None: raise SchemaParseException( - 'Invalid absolute schema name: %r.' 
% self._fullname) + f'Invalid absolute schema name: {self._fullname!r}.') self._name = match.group(1) self._namespace = self._fullname[:-(len(self._name) + 1)] @@ -245,13 +242,12 @@ def __init__(self, name, namespace=None): self._namespace = namespace self._fullname = (self._name if (not self._namespace) else - '%s.%s' % (self._namespace, self._name)) + f'{self._namespace}.{self._name}') # Validate the fullname: if _RE_FULL_NAME.match(self._fullname) is None: - raise SchemaParseException( - 'Invalid schema name %r inferred from name %r and namespace %r.' - % (self._fullname, self._name, self._namespace)) + raise SchemaParseException(f"Invalid schema name {self._fullname!r} inferred from " + f"name {self._name!r} and namespace {self._namespace!r}.") def __eq__(self, other): if not isinstance(other, Name): @@ -305,21 +301,19 @@ def default_namespace(self): def new_with_default_namespace(self, namespace): """Creates a new name tracker from this tracker, but with a new default ns. - Args: - namespace: New default namespace to use. - Returns: - New name tracker with the specified default namespace. + :param Any namespace: New default namespace to use. + :returns: New name tracker with the specified default namespace. + :rtype: Names """ return Names(names=self._names, default_namespace=namespace) def get_name(self, name, namespace=None): """Resolves the Avro name according to this name tracker's state. - Args: - name: Name to resolve (absolute or relative). - namespace: Optional explicit namespace. - Returns: - The specified name, resolved according to this tracker. + :param Any name: Name to resolve (absolute or relative). + :param Optional[Any] namespace: Optional explicit namespace. + :returns: The specified name, resolved according to this tracker. + :rtype: Name """ if namespace is None: namespace = self._default_namespace @@ -328,19 +322,16 @@ def get_name(self, name, namespace=None): def get_schema(self, name, namespace=None): """Resolves an Avro schema by name. - Args: - name: Name (relative or absolute) of the Avro schema to look up. - namespace: Optional explicit namespace. - Returns: - The schema with the specified name, if any, or None. + :param Any name: Name (absolute or relative) of the Avro schema to look up. + :param Optional[Any] namespace: Optional explicit namespace. + :returns: The schema with the specified name, if any, or None. + :rtype: Union[Any, None] """ avro_name = self.get_name(name=name, namespace=namespace) return self._names.get(avro_name.fullname, None) + # Given a dict of properties, return the properties with the namespace removed if it matches this tracker's default namespace. def prune_namespace(self, properties): - """given a properties, return properties with namespace removed if - it matches the own default namespace - """ if self.default_namespace is None: # I have no default -- no change return properties @@ -358,15 +349,14 @@ def prune_namespace(self, properties): def register(self, schema): """Registers a new named schema in this tracker. - Args: - schema: Named Avro schema to register in this tracker. + :param Any schema: Named Avro schema to register in this tracker. """ if schema.fullname in VALID_TYPES: raise SchemaParseException( f'{schema.fullname} is a reserved type name.') if schema.fullname in self.names: raise SchemaParseException( - 'Avro name %r already exists.'
% schema.fullname) + f'Avro name {schema.fullname!r} already exists.') logger.log(DEBUG_VERBOSE, 'Register new name for %r', schema.fullname) self._names[schema.fullname] = schema @@ -398,7 +388,7 @@ def __init__( names: Tracker to resolve and register Avro names. other_props: Optional map of additional properties of the schema. """ - assert (data_type in NAMED_TYPES), ('Invalid named type: %r' % data_type) + assert (data_type in NAMED_TYPES), (f'Invalid named type: {data_type!r}') self._avro_name = names.get_name(name=name, namespace=namespace) super(NamedSchema, self).__init__(data_type, other_props) @@ -429,24 +419,21 @@ def fullname(self): def name_ref(self, names): """Reports this schema name relative to the specified name tracker. - Args: - names: Avro name tracker to relativize this schema name against. - Returns: - This schema name, relativized against the specified name tracker. + :param Any names: Avro name tracker to relativize this schema name against. + :returns: This schema name, relativized against the specified name tracker. + :rtype: Any """ if self.namespace == names.default_namespace: return self.name return self.fullname + # Converts the schema object into its AVRO specification representation. + + # Schema types that have names (records, enums, and fixed) must be aware + # of not re-defining schemas that are already listed in the parameter names. @abc.abstractmethod def to_json(self, names): - """Converts the schema object into its AVRO specification representation. - - Schema types that have names (records, enums, and fixed) must - be aware of not re-defining schemas that are already listed - in the parameter names. - """ - raise Exception('Cannot run abstract method.') + ... # ------------------------------------------------------------------------------ @@ -480,10 +467,10 @@ def __init__( doc: other_props: """ - if not isinstance(name, str) or not name: - raise SchemaParseException('Invalid record field name: %r.' % name) - if order is not None and order not in VALID_FIELD_SORT_ORDERS: - raise SchemaParseException('Invalid record field order: %r.' % order) + if (not isinstance(name, str)) or (not name): + raise SchemaParseException(f'Invalid record field name: {name!r}.') + if (order is not None) and (order not in VALID_FIELD_SORT_ORDERS): + raise SchemaParseException(f'Invalid record field order: {order!r}.') # All properties of this record field: self._props = {} @@ -576,7 +563,7 @@ def __init__(self, data_type, other_props=None): data_type: Type of the schema to construct. Must be primitive. """ if data_type not in PRIMITIVE_TYPES: - raise AvroException('%r is not a valid primitive type.' % data_type) + raise AvroException(f'{data_type!r} is not a valid primitive type.') super(PrimitiveSchema, self).__init__(data_type, other_props=other_props) @property @@ -672,7 +659,7 @@ def __init__( if (len(symbol_set) != len(symbols) or not all(map(lambda symbol: isinstance(symbol, str), symbols))): raise AvroException( - 'Invalid symbols for enum schema: %r.' 
% (symbols,)) + f'Invalid symbols for enum schema: {symbols!r}.') super(EnumSchema, self).__init__( data_type=ENUM, @@ -801,22 +788,19 @@ def __init__(self, schemas): filter(lambda schema: schema.type in NAMED_TYPES, self._schemas)) unique_names = frozenset(map(lambda schema: schema.fullname, named_branches)) if len(unique_names) != len(named_branches): - raise AvroException( - 'Invalid union branches with duplicate schema name:%s' - % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) + raise AvroException(f'Invalid union branches with duplicate schema name:{schemas}') # Types are unique within unnamed schemas, and union is not allowed: unnamed_branches = tuple( filter(lambda schema: schema.type not in NAMED_TYPES, self._schemas)) unique_types = frozenset(map(lambda schema: schema.type, unnamed_branches)) if UNION in unique_types: - raise AvroException( - 'Invalid union branches contain other unions:%s' - % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) + raise AvroException(f'Invalid union branches contain other unions:{schemas}') if len(unique_types) != len(unnamed_branches): - raise AvroException( - 'Invalid union branches with duplicate type:%s' - % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + schemas = ''.join(map(lambda schema: (f'\n\t - {schema}'), self._schemas)) + raise AvroException(f'Invalid union branches with duplicate type:{schemas}') @property def schemas(self): @@ -874,11 +858,11 @@ class RecordSchema(NamedSchema): def _make_field(index, field_desc, names): """Builds field schemas from a list of field JSON descriptors. - Args: - index: 0-based index of the field in the record. - field_desc: JSON descriptors of a record field. - Return: - The field schema. + :param int index: 0-based index of the field in the record. + :param Any field_desc: JSON descriptors of a record field. + :param Any names: The names for this schema. + :returns: The field schema. + :rtype: Field """ field_schema = schema_from_json_data( json_data=field_desc['type'], @@ -900,14 +884,12 @@ def _make_field(index, field_desc, names): @staticmethod def make_field_list(field_desc_list, names): """Builds field schemas from a list of field JSON descriptors. - Guarantees field name unicity. - Args: - field_desc_list: collection of field JSON descriptors. - names: Avro schema tracker. - Yields - Field schemas. + :param Any field_desc_list: Collection of field JSON descriptors. + :param Any names: The names for this schema. + :returns: Field schemas. + :rtype: Field """ for index, field_desc in enumerate(field_desc_list): yield RecordSchema._make_field(index, field_desc, names) @@ -915,19 +897,17 @@ def make_field_list(field_desc_list, names): @staticmethod def _make_field_map(fields): """Builds the field map. - Guarantees field name unicity. - Args: - fields: iterable of field schema. - Returns: - A map of field schemas, indexed by name. + :param Any fields: Iterable of field schema. + :returns: A map of field schemas, indexed by name. + :rtype: Dict[Any, Any] """ field_map = {} for field in fields: if field.name in field_map: raise SchemaParseException( - 'Duplicate record field name %r.' % field.name) + f'Duplicate record field name {field.name!r}.') field_map[field.name] = field return field_map @@ -975,8 +955,9 @@ def __init__( ) else: raise SchemaParseException( - 'Invalid record type: %r.' 
% record_type) + f'Invalid record type: {record_type!r}.') + nested_names = [] if record_type in [RECORD, ERROR]: avro_name = names.get_name(name=name, namespace=namespace) nested_names = names.new_with_default_namespace(namespace=avro_name.namespace) @@ -1032,14 +1013,12 @@ def __eq__(self, that): def filter_keys_out(items, keys): """Filters a collection of (key, value) items. - Exclude any item whose key belongs to keys. - Args: - items: Dictionary of items to filter the keys out of. - keys: Keys to filter out. - Yields: - Filtered items. + :param Dict[Any, Any] items: Dictionary of items to filter the keys out of. + :param Dict[Any, Any] keys: Dictionary of keys to filter the extracted keys against. + :returns: Filtered items. + :rtype: Tuple(Any, Any) """ for key, value in items.items(): if key in keys: @@ -1057,9 +1036,7 @@ def _schema_from_json_string(json_string, names): # Look for a known named schema: schema = names.get_schema(name=json_string) if schema is None: - raise SchemaParseException( - 'Unknown named schema %r, known names: %r.' - % (json_string, sorted(names.names))) + raise SchemaParseException(f"Unknown named schema {json_string!r}, known names: {sorted(names.names)!r}.") return schema @@ -1074,7 +1051,7 @@ def _schema_from_json_object(json_object, names): data_type = json_object.get('type') if data_type is None: raise SchemaParseException( - 'Avro schema JSON descriptor has no "type" property: %r' % json_object) + f'Avro schema JSON descriptor has no "type" property: {json_object!r}') other_props = dict( filter_keys_out(items=json_object, keys=SCHEMA_RESERVED_PROPS)) @@ -1110,7 +1087,7 @@ def MakeFields(names): other_props=other_props, ) else: - raise Exception('Internal error: unknown type %r.' % data_type) + raise ValueError(f'Internal error: unknown type {data_type!r}.') elif data_type in VALID_TYPES: # Unnamed, non-primitive Avro type: @@ -1118,9 +1095,7 @@ def MakeFields(names): if data_type == ARRAY: items_desc = json_object.get('items') if items_desc is None: - raise SchemaParseException( - 'Invalid array schema descriptor with no "items" : %r.' - % json_object) + raise SchemaParseException(f'Invalid array schema descriptor with no "items" : {json_object!r}.') result = ArraySchema( items=schema_from_json_data(items_desc, names), other_props=other_props, @@ -1129,9 +1104,7 @@ def MakeFields(names): elif data_type == MAP: values_desc = json_object.get('values') if values_desc is None: - raise SchemaParseException( - 'Invalid map schema descriptor with no "values" : %r.' - % json_object) + raise SchemaParseException(f'Invalid map schema descriptor with no "values" : {json_object!r}.') result = MapSchema( values=schema_from_json_data(values_desc, names=names), other_props=other_props, @@ -1146,10 +1119,9 @@ def MakeFields(names): result = ErrorUnionSchema(schemas=error_schemas) else: - raise Exception('Internal error: unknown type %r.' % data_type) + raise ValueError(f'Internal error: unknown type {data_type!r}.') else: - raise SchemaParseException( - 'Invalid JSON descriptor for an Avro schema: %r' % json_object) + raise SchemaParseException(f'Invalid JSON descriptor for an Avro schema: {json_object!r}') return result @@ -1163,14 +1135,12 @@ def MakeFields(names): def schema_from_json_data(json_data, names=None): """Builds an Avro Schema from its JSON descriptor. + Raises SchemaParseException if the descriptor is invalid. - Args: - json_data: JSON data representing the descriptor of the Avro schema. - names: Optional tracker for Avro named schemas. 
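# Hedged usage sketch for schema_from_json_data as defined here; the descriptor is
# an invented example and this assumes the module's Names and ARRAY are in scope.
names = Names()
array_schema = schema_from_json_data({"type": "array", "items": "string"}, names)
assert array_schema.type == ARRAY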
- Returns: - The Avro schema parsed from the JSON descriptor. - Raises: - SchemaParseException: if the descriptor is invalid. + :param Any json_data: JSON data representing the descriptor of the Avro schema. + :param Any names: Optional tracker for Avro named schemas. + :returns: The Avro schema parsed from the JSON descriptor. + :rtype: Any """ if names is None: names = Names() @@ -1179,7 +1149,7 @@ def schema_from_json_data(json_data, names=None): parser = _JSONDataParserTypeMap.get(type(json_data)) if parser is None: raise SchemaParseException( - 'Invalid JSON descriptor for an Avro schema: %r.' % json_data) + f'Invalid JSON descriptor for an Avro schema: {json_data!r}.') return parser(json_data, names=names) @@ -1188,22 +1158,18 @@ def schema_from_json_data(json_data, names=None): def parse(json_string): """Constructs a Schema from its JSON descriptor in text form. + Raises SchemaParseException if a JSON parsing error is met, or if the JSON descriptor is invalid. - Args: - json_string: String representation of the JSON descriptor of the schema. - Returns: - The parsed schema. - Raises: - SchemaParseException: on JSON parsing error, - or if the JSON descriptor is invalid. + :param str json_string: String representation of the JSON descriptor of the schema. + :returns: The parsed schema. + :rtype: Any """ try: json_data = json.loads(json_string) except Exception as exn: raise SchemaParseException( - 'Error parsing schema from JSON: %r. ' - 'Error message: %r.' - % (json_string, exn)) + f'Error parsing schema from JSON: {json_string!r}. ' + f'Error message: {exn!r}.') from exn # Initialize the names object names = Names() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client.py index 136284bd0ef7..9dc8d2ec3cc4 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client.py @@ -3,92 +3,90 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- - -from typing import ( # pylint: disable=unused-import - Union, - Optional, +import logging +import uuid +from typing import ( Any, - Iterable, + cast, Dict, - List, - Type, + Iterator, + Optional, Tuple, TYPE_CHECKING, + Union, ) -import logging - -try: - from urllib.parse import parse_qs, quote -except ImportError: - from urlparse import parse_qs # type: ignore - from urllib2 import quote # type: ignore - -import six +from urllib.parse import parse_qs, quote -from azure.core.configuration import Configuration -from azure.core.credentials import AzureSasCredential +from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential, TokenCredential from azure.core.exceptions import HttpResponseError from azure.core.pipeline import Pipeline -from azure.core.pipeline.transport import RequestsTransport, HttpTransport +from azure.core.pipeline.transport import HttpTransport, RequestsTransport # pylint: disable=non-abstract-transport-import, no-name-in-module from azure.core.pipeline.policies import ( - RedirectPolicy, + AzureSasCredentialPolicy, ContentDecodePolicy, - BearerTokenCredentialPolicy, - ProxyPolicy, DistributedTracingPolicy, HttpLoggingPolicy, + ProxyPolicy, + RedirectPolicy, UserAgentPolicy, - AzureSasCredentialPolicy ) -from .constants import STORAGE_OAUTH_SCOPE, SERVICE_HOST_BASE, CONNECTION_TIMEOUT, READ_TIMEOUT -from .models import LocationMode from .authentication import SharedKeyCredentialPolicy -from .shared_access_signature import QueryStringConstants +from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE +from .models import LocationMode, StorageConfiguration from .policies import ( - StorageHeadersPolicy, + ExponentialRetry, + QueueMessagePolicy, + StorageBearerTokenCredentialPolicy, StorageContentValidation, + StorageHeadersPolicy, + StorageHosts, + StorageLoggingPolicy, StorageRequestHook, StorageResponseHook, - StorageLoggingPolicy, - StorageHosts, - QueueMessagePolicy, - ExponentialRetry, ) +from .request_handlers import serialize_batch_body, _get_batch_request_delimiter +from .response_handlers import PartialBatchErrorException, process_storage_error +from .shared_access_signature import QueryStringConstants from .._version import VERSION -from .response_handlers import process_storage_error, PartialBatchErrorException +from .._shared_access_signature import _is_credential_sastoken +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse # pylint: disable=C4756 _LOGGER = logging.getLogger(__name__) _SERVICE_PARAMS = { - "blob": {"primary": "BlobEndpoint", "secondary": "BlobSecondaryEndpoint"}, - "queue": {"primary": "QueueEndpoint", "secondary": "QueueSecondaryEndpoint"}, - "file": {"primary": "FileEndpoint", "secondary": "FileSecondaryEndpoint"}, - "dfs": {"primary": "BlobEndpoint", "secondary": "BlobEndpoint"}, + "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"}, + "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"}, + "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, + "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, } -class StorageAccountHostsMixin(object): # pylint: disable=too-many-instance-attributes +class StorageAccountHostsMixin(object): + _client: Any def __init__( self, - parsed_url, # type: Any - service, # type: str - 
credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None + parsed_url: Any, + service: str, + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY) self._hosts = kwargs.get("_hosts") self.scheme = parsed_url.scheme + self._is_localhost = False if service not in ["blob", "queue", "file-share", "dfs"]: - raise ValueError("Invalid service: {}".format(service)) + raise ValueError(f"Invalid service: {service}") service_name = service.split('-')[0] - account = parsed_url.netloc.split(".{}.core.".format(service_name)) + account = parsed_url.netloc.split(f".{service_name}.core.") self.account_name = account[0] if len(account) > 1 else None if not self.account_name and parsed_url.netloc.startswith("localhost") \ or parsed_url.netloc.startswith("127.0.0.1"): + self._is_localhost = True self.account_name = parsed_url.path.strip("/") self.credential = _format_shared_key_credential(self.account_name, credential) @@ -98,8 +96,7 @@ def __init__( secondary_hostname = None if hasattr(self.credential, "account_name"): self.account_name = self.credential.account_name - secondary_hostname = "{}-secondary.{}.{}".format( - self.credential.account_name, service_name, SERVICE_HOST_BASE) + secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" if not self._hosts: if len(account) > 1: @@ -109,10 +106,8 @@ def __init__( primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip('/') self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} - self.require_encryption = kwargs.get("require_encryption", False) - self.key_encryption_key = kwargs.get("key_encryption_key") - self.key_resolver_function = kwargs.get("key_resolver_function") - self._config, self._pipeline = self._create_pipeline(self.credential, storage_sdk=service, **kwargs) + self._sdk_moniker = f"storage-{service}/{VERSION}" + self._config, self._pipeline = self._create_pipeline(self.credential, sdk_moniker=self._sdk_moniker, **kwargs) def __enter__(self): self._client.__enter__() @@ -133,6 +128,8 @@ def url(self): This could be either the primary endpoint, or the secondary endpoint depending on the current :func:`location_mode`. + :returns: The full endpoint URL to this entity, including SAS token if used. + :rtype: str """ return self._format_url(self._hosts[self._location_mode]) @@ -140,7 +137,7 @@ def url(self): def primary_endpoint(self): """The full primary endpoint URL. - :type: str + :rtype: str """ return self._format_url(self._hosts[LocationMode.PRIMARY]) @@ -148,7 +145,7 @@ def primary_endpoint(self): def primary_hostname(self): """The hostname of the primary endpoint. - :type: str + :rtype: str """ return self._hosts[LocationMode.PRIMARY] @@ -159,7 +156,7 @@ def secondary_endpoint(self): If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional `secondary_hostname` keyword argument on instantiation. - :type: str + :rtype: str :raise ValueError: """ if not self._hosts[LocationMode.SECONDARY]: @@ -173,7 +170,7 @@ def secondary_hostname(self): If not available this will be None. To explicitly specify a secondary hostname, use the optional `secondary_hostname` keyword argument on instantiation. 
- :type: str or None + :rtype: Optional[str] """ return self._hosts[LocationMode.SECONDARY] @@ -183,7 +180,7 @@ def location_mode(self): By default this will be "primary". Options include "primary" and "secondary". - :type: str + :rtype: str """ return self._location_mode @@ -194,52 +191,64 @@ def location_mode(self, value): self._location_mode = value self._client._config.url = self.url # pylint: disable=protected-access else: - raise ValueError("No host URL for location mode: {}".format(value)) + raise ValueError(f"No host URL for location mode: {value}") @property def api_version(self): """The version of the Storage API used for requests. - :type: str + :rtype: str """ return self._client._config.version # pylint: disable=protected-access - def _format_query_string(self, sas_token, credential, snapshot=None, share_snapshot=None): + def _format_query_string( + self, sas_token: Optional[str], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]], # pylint: disable=line-too-long + snapshot: Optional[str] = None, + share_snapshot: Optional[str] = None + ) -> Tuple[str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]]]: # pylint: disable=line-too-long query_str = "?" if snapshot: - query_str += "snapshot={}&".format(self.snapshot) + query_str += f"snapshot={snapshot}&" if share_snapshot: - query_str += "sharesnapshot={}&".format(self.snapshot) + query_str += f"sharesnapshot={share_snapshot}&" if sas_token and isinstance(credential, AzureSasCredential): raise ValueError( "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") - if sas_token and not credential: - query_str += sas_token - elif is_credential_sastoken(credential): + if _is_credential_sastoken(credential): + credential = cast(str, credential) query_str += credential.lstrip("?") credential = None + elif sas_token: + query_str += sas_token return query_str.rstrip("?&"), credential - def _create_pipeline(self, credential, **kwargs): - # type: (Any, **Any) -> Tuple[Configuration, Pipeline] - self._credential_policy = None + def _create_pipeline( + self, credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Tuple[StorageConfiguration, Pipeline]: + self._credential_policy: Any = None if hasattr(credential, "get_token"): - self._credential_policy = BearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE) + if kwargs.get('audience'): + audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE + else: + audience = STORAGE_OAUTH_SCOPE + self._credential_policy = StorageBearerTokenCredentialPolicy(cast(TokenCredential, credential), audience) elif isinstance(credential, SharedKeyCredentialPolicy): self._credential_policy = credential elif isinstance(credential, AzureSasCredential): self._credential_policy = AzureSasCredentialPolicy(credential) elif credential is not None: - raise TypeError("Unsupported credential: {}".format(type(credential))) + raise TypeError(f"Unsupported credential: {type(credential)}") config = kwargs.get("_configuration") or create_configuration(**kwargs) if kwargs.get("_pipeline"): return config, kwargs["_pipeline"] - config.transport = kwargs.get("transport") # type: ignore + transport = kwargs.get("transport") kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) kwargs.setdefault("read_timeout", READ_TIMEOUT) - if 
not config.transport: - config.transport = RequestsTransport(**kwargs) + if not transport: + transport = RequestsTransport(**kwargs) policies = [ QueueMessagePolicy(), config.proxy_policy, @@ -258,26 +267,34 @@ def _create_pipeline(self, credential, **kwargs): HttpLoggingPolicy(**kwargs) ] if kwargs.get("_additional_pipeline_policies"): - policies = policies + kwargs.get("_additional_pipeline_policies") - return config, Pipeline(config.transport, policies=policies) + policies = policies + kwargs.get("_additional_pipeline_policies") # type: ignore + config.transport = transport # type: ignore + return config, Pipeline(transport, policies=policies) def _batch_send( - self, *reqs, # type: HttpRequest - **kwargs - ): + self, + *reqs: "HttpRequest", + **kwargs: Any + ) -> Iterator["HttpResponse"]: """Given a series of request, do a Storage batch call. + + :param HttpRequest reqs: A collection of HttpRequest objects. + :returns: An iterator of HttpResponse objects. + :rtype: Iterator[HttpResponse] """ # Pop it here, so requests doesn't feel bad about additional kwarg raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) + batch_id = str(uuid.uuid1()) + request = self._client._client.post( # pylint: disable=protected-access - url='{}://{}/?comp=batch{}{}'.format( - self.scheme, - self.primary_hostname, - kwargs.pop('sas', ""), - kwargs.pop('timeout', "") + url=( + f'{self.scheme}://{self.primary_hostname}/' + f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}" + f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}" ), headers={ - 'x-ms-version': self.api_version + 'x-ms-version': self.api_version, + "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False) } ) @@ -291,10 +308,17 @@ def _batch_send( enforce_https=False ) + Pipeline._prepare_multipart_mixed_request(request) # pylint: disable=protected-access + body = serialize_batch_body(request.multipart_mixed_info[0], batch_id) + request.set_bytes_body(body) + + temp = request.multipart_mixed_info + request.multipart_mixed_info = None pipeline_response = self._pipeline.run( request, **kwargs ) response = pipeline_response.http_response + request.multipart_mixed_info = temp try: if response.status_code not in [202]: @@ -309,10 +333,11 @@ def _batch_send( ) raise error return iter(parts) - return parts + return parts # type: ignore [no-any-return] except HttpResponseError as error: process_storage_error(error) + class TransportWrapper(HttpTransport): """Wrapper class that ensures that an inner client created by a `get_client` method does not close the outer transport for the parent @@ -333,12 +358,15 @@ def close(self): def __enter__(self): pass - def __exit__(self, *args): # pylint: disable=arguments-differ + def __exit__(self, *args): pass -def _format_shared_key_credential(account_name, credential): - if isinstance(credential, six.string_types): +def _format_shared_key_credential( + account_name: Optional[str], + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential]] = None # pylint: disable=line-too-long +) -> Any: + if isinstance(credential, str): if not account_name: raise ValueError("Unable to determine account name for shared key credential.") credential = {"account_name": account_name, "account_key": credential} @@ -348,23 +376,29 @@ def _format_shared_key_credential(account_name, credential): if "account_key" not in credential: raise ValueError("Shared key credential missing 'account_key") 
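# Sketch of the three credential shapes _format_shared_key_credential normalizes;
# all account names and keys below are invented placeholder values.
from azure.core.credentials import AzureNamedKeyCredential

as_key_string = "fake-account-key"  # a bare str: must be paired with an account name
as_dict = {"account_name": "devstore", "account_key": "fake-account-key"}
as_named_key = AzureNamedKeyCredential("devstore", "fake-account-key")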
return SharedKeyCredentialPolicy(**credential) + if isinstance(credential, AzureNamedKeyCredential): + return SharedKeyCredentialPolicy(credential.named_key.name, credential.named_key.key) return credential -def parse_connection_str(conn_str, credential, service): +def parse_connection_str( + conn_str: str, + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]], + service: str +) -> Tuple[str, Optional[str], Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]]]: # pylint: disable=line-too-long conn_str = conn_str.rstrip(";") - conn_settings = [s.split("=", 1) for s in conn_str.split(";")] - if any(len(tup) != 2 for tup in conn_settings): + conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] + if any(len(tup) != 2 for tup in conn_settings_list): raise ValueError("Connection string is either blank or malformed.") - conn_settings = dict(conn_settings) + conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) endpoints = _SERVICE_PARAMS[service] primary = None secondary = None if not credential: try: - credential = {"account_name": conn_settings["AccountName"], "account_key": conn_settings["AccountKey"]} + credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} except KeyError: - credential = conn_settings.get("SharedAccessSignature") + credential = conn_settings.get("SHAREDACCESSSIGNATURE") if endpoints["primary"] in conn_settings: primary = conn_settings[endpoints["primary"]] if endpoints["secondary"] in conn_settings: @@ -373,77 +407,52 @@ def parse_connection_str(conn_str, credential, service): if endpoints["secondary"] in conn_settings: raise ValueError("Connection string specifies only secondary endpoint.") try: - primary = "{}://{}.{}.{}".format( - conn_settings["DefaultEndpointsProtocol"], - conn_settings["AccountName"], - service, - conn_settings["EndpointSuffix"], + primary =( + f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://" + f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}" ) - secondary = "{}-secondary.{}.{}".format( - conn_settings["AccountName"], service, conn_settings["EndpointSuffix"] + secondary = ( + f"{conn_settings['ACCOUNTNAME']}-secondary." + f"{service}.{conn_settings['ENDPOINTSUFFIX']}" ) except KeyError: pass if not primary: try: - primary = "https://{}.{}.{}".format( - conn_settings["AccountName"], service, conn_settings.get("EndpointSuffix", SERVICE_HOST_BASE) + primary = ( + f"https://{conn_settings['ACCOUNTNAME']}." 
+ f"{service}.{conn_settings.get('ENDPOINTSUFFIX', SERVICE_HOST_BASE)}" ) - except KeyError: - raise ValueError("Connection string missing required connection details.") + except KeyError as exc: + raise ValueError("Connection string missing required connection details.") from exc + if service == "dfs": + primary = primary.replace(".blob.", ".dfs.") + if secondary: + secondary = secondary.replace(".blob.", ".dfs.") return primary, secondary, credential -def create_configuration(**kwargs): - # type: (**Any) -> Configuration - config = Configuration(**kwargs) +def create_configuration(**kwargs: Any) -> StorageConfiguration: + # Backwards compatibility if someone is not passing sdk_moniker + if not kwargs.get("sdk_moniker"): + kwargs["sdk_moniker"] = f"storage-{kwargs.pop('storage_sdk')}/{VERSION}" + config = StorageConfiguration(**kwargs) config.headers_policy = StorageHeadersPolicy(**kwargs) - config.user_agent_policy = UserAgentPolicy( - sdk_moniker="storage-{}/{}".format(kwargs.pop('storage_sdk'), VERSION), **kwargs) + config.user_agent_policy = UserAgentPolicy(**kwargs) config.retry_policy = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) config.logging_policy = StorageLoggingPolicy(**kwargs) config.proxy_policy = ProxyPolicy(**kwargs) - - # Storage settings - config.max_single_put_size = kwargs.get("max_single_put_size", 64 * 1024 * 1024) - config.copy_polling_interval = 15 - - # Block blob uploads - config.max_block_size = kwargs.get("max_block_size", 4 * 1024 * 1024) - config.min_large_block_upload_threshold = kwargs.get("min_large_block_upload_threshold", 4 * 1024 * 1024 + 1) - config.use_byte_buffer = kwargs.get("use_byte_buffer", False) - - # Page blob uploads - config.max_page_size = kwargs.get("max_page_size", 4 * 1024 * 1024) - - # Blob downloads - config.max_single_get_size = kwargs.get("max_single_get_size", 32 * 1024 * 1024) - config.max_chunk_get_size = kwargs.get("max_chunk_get_size", 4 * 1024 * 1024) - - # File uploads - config.max_range_size = kwargs.get("max_range_size", 4 * 1024 * 1024) return config -def parse_query(query_str): +def parse_query(query_str: str) -> Tuple[Optional[str], Optional[str]]: sas_values = QueryStringConstants.to_list() parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()} - sas_params = ["{}={}".format(k, quote(v, safe='')) for k, v in parsed_query.items() if k in sas_values] + sas_params = [f"{k}={quote(v, safe='')}" for k, v in parsed_query.items() if k in sas_values] sas_token = None if sas_params: sas_token = "&".join(sas_params) snapshot = parsed_query.get("snapshot") or parsed_query.get("sharesnapshot") return snapshot, sas_token - - -def is_credential_sastoken(credential): - if not credential or not isinstance(credential, six.string_types): - return False - - sas_values = QueryStringConstants.to_list() - parsed_query = parse_qs(credential.lstrip("?")) - if parsed_query and all([k in sas_values for k in parsed_query.keys()]): - return True - return False diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client_async.py index 8834292ad688..6186b29db107 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client_async.py +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/base_client_async.py @@ -3,47 +3,51 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# mypy: disable-error-code="attr-defined" -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, Type, Tuple, - TYPE_CHECKING -) import logging +from typing import Any, cast, Dict, Optional, Tuple, TYPE_CHECKING, Union -from azure.core.credentials import AzureSasCredential -from azure.core.pipeline import AsyncPipeline from azure.core.async_paging import AsyncList +from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential +from azure.core.credentials_async import AsyncTokenCredential from azure.core.exceptions import HttpResponseError +from azure.core.pipeline import AsyncPipeline from azure.core.pipeline.policies import ( - ContentDecodePolicy, - AsyncBearerTokenCredentialPolicy, AsyncRedirectPolicy, + AzureSasCredentialPolicy, + ContentDecodePolicy, DistributedTracingPolicy, HttpLoggingPolicy, - AzureSasCredentialPolicy, ) from azure.core.pipeline.transport import AsyncHttpTransport -from .constants import STORAGE_OAUTH_SCOPE, CONNECTION_TIMEOUT, READ_TIMEOUT from .authentication import SharedKeyCredentialPolicy from .base_client import create_configuration +from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE +from .models import StorageConfiguration from .policies import ( + QueueMessagePolicy, StorageContentValidation, - StorageRequestHook, - StorageHosts, StorageHeadersPolicy, - QueueMessagePolicy + StorageHosts, + StorageRequestHook, ) -from .policies_async import AsyncStorageResponseHook - -from .response_handlers import process_storage_error, PartialBatchErrorException +from .policies_async import AsyncStorageBearerTokenCredentialPolicy, AsyncStorageResponseHook +from .response_handlers import PartialBatchErrorException, process_storage_error +from .._shared_access_signature import _is_credential_sastoken if TYPE_CHECKING: - from azure.core.pipeline import Pipeline - from azure.core.pipeline.transport import HttpRequest - from azure.core.configuration import Configuration + from azure.core.pipeline.transport import HttpRequest, HttpResponse # pylint: disable=C4756 _LOGGER = logging.getLogger(__name__) +_SERVICE_PARAMS = { + "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"}, + "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"}, + "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, + "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, +} + class AsyncStorageAccountHostsMixin(object): @@ -66,64 +70,101 @@ async def close(self): """ await self._client.close() - def _create_pipeline(self, credential, **kwargs): - # type: (Any, **Any) -> Tuple[Configuration, Pipeline] - self._credential_policy = None + def _format_query_string( + self, sas_token: Optional[str], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]], # pylint: disable=line-too-long + snapshot: Optional[str] = None, + share_snapshot: Optional[str] = None + ) -> Tuple[str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]]]: # pylint: disable=line-too-long 
+ query_str = "?" + if snapshot: + query_str += f"snapshot={snapshot}&" + if share_snapshot: + query_str += f"sharesnapshot={share_snapshot}&" + if sas_token and isinstance(credential, AzureSasCredential): + raise ValueError( + "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") + if _is_credential_sastoken(credential): + query_str += credential.lstrip("?") # type: ignore [union-attr] + credential = None + elif sas_token: + query_str += sas_token + return query_str.rstrip("?&"), credential + + def _create_pipeline( + self, credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Tuple[StorageConfiguration, AsyncPipeline]: + self._credential_policy: Optional[ + Union[AsyncStorageBearerTokenCredentialPolicy, + SharedKeyCredentialPolicy, + AzureSasCredentialPolicy]] = None if hasattr(credential, 'get_token'): - self._credential_policy = AsyncBearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE) + if kwargs.get('audience'): + audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE + else: + audience = STORAGE_OAUTH_SCOPE + self._credential_policy = AsyncStorageBearerTokenCredentialPolicy( + cast(AsyncTokenCredential, credential), audience) elif isinstance(credential, SharedKeyCredentialPolicy): self._credential_policy = credential elif isinstance(credential, AzureSasCredential): self._credential_policy = AzureSasCredentialPolicy(credential) elif credential is not None: - raise TypeError("Unsupported credential: {}".format(type(credential))) + raise TypeError(f"Unsupported credential: {type(credential)}") config = kwargs.get('_configuration') or create_configuration(**kwargs) if kwargs.get('_pipeline'): return config, kwargs['_pipeline'] - config.transport = kwargs.get('transport') # type: ignore + transport = kwargs.get('transport') kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) kwargs.setdefault("read_timeout", READ_TIMEOUT) - if not config.transport: + if not transport: try: - from azure.core.pipeline.transport import AioHttpTransport - except ImportError: - raise ImportError("Unable to create async transport. Please check aiohttp is installed.") - config.transport = AioHttpTransport(**kwargs) + from azure.core.pipeline.transport import AioHttpTransport # pylint: disable=non-abstract-transport-import + except ImportError as exc: + raise ImportError("Unable to create async transport. 
Please check aiohttp is installed.") from exc + transport = AioHttpTransport(**kwargs) + hosts = self._hosts policies = [ QueueMessagePolicy(), - config.headers_policy, config.proxy_policy, config.user_agent_policy, StorageContentValidation(), - StorageRequestHook(**kwargs), - self._credential_policy, ContentDecodePolicy(response_encoding="utf-8"), AsyncRedirectPolicy(**kwargs), - StorageHosts(hosts=self._hosts, **kwargs), # type: ignore + StorageHosts(hosts=hosts, **kwargs), config.retry_policy, + config.headers_policy, + StorageRequestHook(**kwargs), + self._credential_policy, config.logging_policy, AsyncStorageResponseHook(**kwargs), DistributedTracingPolicy(**kwargs), HttpLoggingPolicy(**kwargs), ] if kwargs.get("_additional_pipeline_policies"): - policies = policies + kwargs.get("_additional_pipeline_policies") - return config, AsyncPipeline(config.transport, policies=policies) + policies = policies + kwargs.get("_additional_pipeline_policies") #type: ignore + config.transport = transport #type: ignore + return config, AsyncPipeline(transport, policies=policies) #type: ignore async def _batch_send( - self, *reqs: 'HttpRequest', - **kwargs - ): + self, + *reqs: "HttpRequest", + **kwargs: Any + ) -> AsyncList["HttpResponse"]: """Given a series of request, do a Storage batch call. + + :param HttpRequest reqs: A collection of HttpRequest objects. + :returns: An AsyncList of HttpResponse objects. + :rtype: AsyncList[HttpResponse] """ # Pop it here, so requests doesn't feel bad about additional kwarg raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) request = self._client._client.post( # pylint: disable=protected-access - url='{}://{}/?comp=batch{}{}'.format( - self.scheme, - self.primary_hostname, - kwargs.pop('sas', None), - kwargs.pop('timeout', None) + url=( + f'{self.scheme}://{self.primary_hostname}/' + f"{kwargs.pop('path', '')}?{kwargs.pop('restype', '')}" + f"comp=batch{kwargs.pop('sas', '')}{kwargs.pop('timeout', '')}" ), headers={ 'x-ms-version': self.api_version @@ -132,7 +173,7 @@ async def _batch_send( policies = [StorageHeadersPolicy()] if self._credential_policy: - policies.append(self._credential_policy) + policies.append(self._credential_policy) # type: ignore request.set_multipart_mixed( *reqs, @@ -160,10 +201,60 @@ async def _batch_send( ) raise error return AsyncList(parts_list) - return parts + return parts # type: ignore [no-any-return] except HttpResponseError as error: process_storage_error(error) +def parse_connection_str( + conn_str: str, + credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]], + service: str +) -> Tuple[str, Optional[str], Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]]]: # pylint: disable=line-too-long + conn_str = conn_str.rstrip(";") + conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] + if any(len(tup) != 2 for tup in conn_settings_list): + raise ValueError("Connection string is either blank or malformed.") + conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) + endpoints = _SERVICE_PARAMS[service] + primary = None + secondary = None + if not credential: + try: + credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} + except KeyError: + credential = conn_settings.get("SHAREDACCESSSIGNATURE") + if endpoints["primary"] in conn_settings: + primary = conn_settings[endpoints["primary"]] + if endpoints["secondary"] in 
conn_settings:
+            secondary = conn_settings[endpoints["secondary"]]
+    else:
+        if endpoints["secondary"] in conn_settings:
+            raise ValueError("Connection string specifies only secondary endpoint.")
+        try:
+            primary = (
+                f"{conn_settings['DEFAULTENDPOINTSPROTOCOL']}://"
+                f"{conn_settings['ACCOUNTNAME']}.{service}.{conn_settings['ENDPOINTSUFFIX']}"
+            )
+            secondary = (
+                f"{conn_settings['ACCOUNTNAME']}-secondary."
+                f"{service}.{conn_settings['ENDPOINTSUFFIX']}"
+            )
+        except KeyError:
+            pass
+
+    if not primary:
+        try:
+            primary = (
+                f"https://{conn_settings['ACCOUNTNAME']}."
+                f"{service}.{conn_settings.get('ENDPOINTSUFFIX', SERVICE_HOST_BASE)}"
+            )
+        except KeyError as exc:
+            raise ValueError("Connection string missing required connection details.") from exc
+
+    if service == "dfs":
+        primary = primary.replace(".blob.", ".dfs.")
+        if secondary:
+            secondary = secondary.replace(".blob.", ".dfs.")
+
+    return primary, secondary, credential


 class AsyncTransportWrapper(AsyncHttpTransport):
     """Wrapper class that ensures that an inner client created
@@ -185,5 +276,5 @@ async def close(self):
     async def __aenter__(self):
         pass

-    async def __aexit__(self, *args):  # pylint: disable=arguments-differ
+    async def __aexit__(self, *args):
         pass
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/constants.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/constants.py
index bdee829383cd..0b4b029a2d1b 100644
--- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/constants.py
+++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/constants.py
@@ -4,24 +4,16 @@
 # license information.
 # --------------------------------------------------------------------------
-import sys
-from .._generated import AzureBlobStorage
+from .._serialize import _SUPPORTED_API_VERSIONS

-X_MS_VERSION = AzureBlobStorage(url="get_api_version")._config.version  # pylint: disable=protected-access
+X_MS_VERSION = _SUPPORTED_API_VERSIONS[-1]

-# Socket timeout in seconds
+# Default socket timeouts, in seconds
 CONNECTION_TIMEOUT = 20
-READ_TIMEOUT = 20
-
-# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned)
-# The socket timeout is now the maximum total duration to send all data.
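Both copies of parse_connection_str (sync and async) now upper-case every settings key before lookup, which is what makes connection-string parsing case-insensitive. A minimal standalone sketch of that normalization and the default-endpoint synthesis (illustrative only; the sample account values are hypothetical, and the vendored helper additionally resolves credentials, secondary endpoints, and dfs hosts):

    # Sketch of the key normalization performed by parse_connection_str.
    conn_str = (
        "defaultendpointsprotocol=https;AccountName=myaccount;"
        "AccountKey=secret;EndpointSuffix=core.windows.net"
    )
    conn_settings = dict(
        (key.upper(), val)
        for key, val in (s.split("=", 1) for s in conn_str.rstrip(";").split(";"))
    )
    # Mixed-case input keys all resolve through their upper-cased form:
    primary = (
        f"https://{conn_settings['ACCOUNTNAME']}."
        f"blob.{conn_settings.get('ENDPOINTSUFFIX', 'core.windows.net')}"
    )
    assert primary == "https://myaccount.blob.core.windows.net"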
-if sys.version_info >= (3, 5): - # the timeout to connect is 20 seconds, and the read timeout is 80000 seconds - # the 80000 seconds was calculated with: - # 4000MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed) - READ_TIMEOUT = 80000 +READ_TIMEOUT = 60 +DEFAULT_OAUTH_SCOPE = "/.default" STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default" SERVICE_HOST_BASE = 'core.windows.net' diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/encryption.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/encryption.py deleted file mode 100644 index 56ac384d9efe..000000000000 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/encryption.py +++ /dev/null @@ -1,542 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import os -from os import urandom -from json import ( - dumps, - loads, -) -from collections import OrderedDict - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers.algorithms import AES -from cryptography.hazmat.primitives.ciphers.modes import CBC -from cryptography.hazmat.primitives.padding import PKCS7 - -from azure.core.exceptions import HttpResponseError - -from .._version import VERSION -from . import encode_base64, decode_base64_to_bytes - - -_ENCRYPTION_PROTOCOL_V1 = '1.0' -_ERROR_OBJECT_INVALID = \ - '{0} does not define a complete interface. Value of {1} is either missing or invalid.' - - -def _validate_not_none(param_name, param): - if param is None: - raise ValueError('{0} should not be None.'.format(param_name)) - - -def _validate_key_encryption_key_wrap(kek): - # Note that None is not callable and so will fail the second clause of each check. - if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key')) - if not hasattr(kek, 'get_kid') or not callable(kek.get_kid): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) - if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm')) - - -class _EncryptionAlgorithm(object): - ''' - Specifies which client encryption algorithm is used. - ''' - AES_CBC_256 = 'AES_CBC_256' - - -class _WrappedContentKey: - ''' - Represents the envelope key details stored on the service. - ''' - - def __init__(self, algorithm, encrypted_key, key_id): - ''' - :param str algorithm: - The algorithm used for wrapping. - :param bytes encrypted_key: - The encrypted content-encryption-key. - :param str key_id: - The key-encryption-key identifier string. - ''' - - _validate_not_none('algorithm', algorithm) - _validate_not_none('encrypted_key', encrypted_key) - _validate_not_none('key_id', key_id) - - self.algorithm = algorithm - self.encrypted_key = encrypted_key - self.key_id = key_id - - -class _EncryptionAgent: - ''' - Represents the encryption agent stored on the service. 
- It consists of the encryption protocol version and encryption algorithm used. - ''' - - def __init__(self, encryption_algorithm, protocol): - ''' - :param _EncryptionAlgorithm encryption_algorithm: - The algorithm used for encrypting the message contents. - :param str protocol: - The protocol version used for encryption. - ''' - - _validate_not_none('encryption_algorithm', encryption_algorithm) - _validate_not_none('protocol', protocol) - - self.encryption_algorithm = str(encryption_algorithm) - self.protocol = protocol - - -class _EncryptionData: - ''' - Represents the encryption data that is stored on the service. - ''' - - def __init__(self, content_encryption_IV, encryption_agent, wrapped_content_key, - key_wrapping_metadata): - ''' - :param bytes content_encryption_IV: - The content encryption initialization vector. - :param _EncryptionAgent encryption_agent: - The encryption agent. - :param _WrappedContentKey wrapped_content_key: - An object that stores the wrapping algorithm, the key identifier, - and the encrypted key bytes. - :param dict key_wrapping_metadata: - A dict containing metadata related to the key wrapping. - ''' - - _validate_not_none('content_encryption_IV', content_encryption_IV) - _validate_not_none('encryption_agent', encryption_agent) - _validate_not_none('wrapped_content_key', wrapped_content_key) - - self.content_encryption_IV = content_encryption_IV - self.encryption_agent = encryption_agent - self.wrapped_content_key = wrapped_content_key - self.key_wrapping_metadata = key_wrapping_metadata - - -def _generate_encryption_data_dict(kek, cek, iv): - ''' - Generates and returns the encryption metadata as a dict. - - :param object kek: The key encryption key. See calling functions for more information. - :param bytes cek: The content encryption key. - :param bytes iv: The initialization vector. - :return: A dict containing all the encryption metadata. - :rtype: dict - ''' - # Encrypt the cek. - wrapped_cek = kek.wrap_key(cek) - - # Build the encryption_data dict. - # Use OrderedDict to comply with Java's ordering requirement. - wrapped_content_key = OrderedDict() - wrapped_content_key['KeyId'] = kek.get_kid() - wrapped_content_key['EncryptedKey'] = encode_base64(wrapped_cek) - wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm() - - encryption_agent = OrderedDict() - encryption_agent['Protocol'] = _ENCRYPTION_PROTOCOL_V1 - encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256 - - encryption_data_dict = OrderedDict() - encryption_data_dict['WrappedContentKey'] = wrapped_content_key - encryption_data_dict['EncryptionAgent'] = encryption_agent - encryption_data_dict['ContentEncryptionIV'] = encode_base64(iv) - encryption_data_dict['KeyWrappingMetadata'] = {'EncryptionLibrary': 'Python ' + VERSION} - - return encryption_data_dict - - -def _dict_to_encryption_data(encryption_data_dict): - ''' - Converts the specified dictionary to an EncryptionData object for - eventual use in decryption. - - :param dict encryption_data_dict: - The dictionary containing the encryption data. - :return: an _EncryptionData object built from the dictionary. 
- :rtype: _EncryptionData - ''' - try: - if encryption_data_dict['EncryptionAgent']['Protocol'] != _ENCRYPTION_PROTOCOL_V1: - raise ValueError("Unsupported encryption version.") - except KeyError: - raise ValueError("Unsupported encryption version.") - wrapped_content_key = encryption_data_dict['WrappedContentKey'] - wrapped_content_key = _WrappedContentKey(wrapped_content_key['Algorithm'], - decode_base64_to_bytes(wrapped_content_key['EncryptedKey']), - wrapped_content_key['KeyId']) - - encryption_agent = encryption_data_dict['EncryptionAgent'] - encryption_agent = _EncryptionAgent(encryption_agent['EncryptionAlgorithm'], - encryption_agent['Protocol']) - - if 'KeyWrappingMetadata' in encryption_data_dict: - key_wrapping_metadata = encryption_data_dict['KeyWrappingMetadata'] - else: - key_wrapping_metadata = None - - encryption_data = _EncryptionData(decode_base64_to_bytes(encryption_data_dict['ContentEncryptionIV']), - encryption_agent, - wrapped_content_key, - key_wrapping_metadata) - - return encryption_data - - -def _generate_AES_CBC_cipher(cek, iv): - ''' - Generates and returns an encryption cipher for AES CBC using the given cek and iv. - - :param bytes[] cek: The content encryption key for the cipher. - :param bytes[] iv: The initialization vector for the cipher. - :return: A cipher for encrypting in AES256 CBC. - :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher - ''' - - backend = default_backend() - algorithm = AES(cek) - mode = CBC(iv) - return Cipher(algorithm, mode, backend) - - -def _validate_and_unwrap_cek(encryption_data, key_encryption_key=None, key_resolver=None): - ''' - Extracts and returns the content_encryption_key stored in the encryption_data object - and performs necessary validation on all parameters. - :param _EncryptionData encryption_data: - The encryption metadata of the retrieved value. - :param obj key_encryption_key: - The key_encryption_key used to unwrap the cek. Please refer to high-level service object - instance variables for more details. - :param func key_resolver: - A function used that, given a key_id, will return a key_encryption_key. Please refer - to high-level service object instance variables for more details. - :return: the content_encryption_key stored in the encryption_data object. - :rtype: bytes[] - ''' - - _validate_not_none('content_encryption_IV', encryption_data.content_encryption_IV) - _validate_not_none('encrypted_key', encryption_data.wrapped_content_key.encrypted_key) - - if _ENCRYPTION_PROTOCOL_V1 != encryption_data.encryption_agent.protocol: - raise ValueError('Encryption version is not supported.') - - content_encryption_key = None - - # If the resolver exists, give priority to the key it finds. - if key_resolver is not None: - key_encryption_key = key_resolver(encryption_data.wrapped_content_key.key_id) - - _validate_not_none('key_encryption_key', key_encryption_key) - if not hasattr(key_encryption_key, 'get_kid') or not callable(key_encryption_key.get_kid): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) - if not hasattr(key_encryption_key, 'unwrap_key') or not callable(key_encryption_key.unwrap_key): - raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key')) - if encryption_data.wrapped_content_key.key_id != key_encryption_key.get_kid(): - raise ValueError('Provided or resolved key-encryption-key does not match the id of key used to encrypt.') - # Will throw an exception if the specified algorithm is not supported. 
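The removed helpers never pin a concrete key-encryption-key type; as the docstrings above spell out, they duck-type against wrap_key, unwrap_key, get_kid, and get_key_wrap_algorithm. A hypothetical in-memory KEK showing the expected shape (illustration only, deliberately insecure):

    import os

    class DemoKeyWrapper:
        # Hypothetical KEK satisfying the duck-typed interface the removed
        # module validates; the XOR "wrapping" is a toy, not real key wrapping.
        def __init__(self):
            self._kid = "local:demo-kek"
            self._secret = os.urandom(32)

        def wrap_key(self, key):
            return bytes(a ^ b for a, b in zip(key, self._secret))

        def unwrap_key(self, wrapped_key, algorithm):
            return bytes(a ^ b for a, b in zip(wrapped_key, self._secret))

        def get_kid(self):
            return self._kid

        def get_key_wrap_algorithm(self):
            return "demo-xor"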
- content_encryption_key = key_encryption_key.unwrap_key(encryption_data.wrapped_content_key.encrypted_key, - encryption_data.wrapped_content_key.algorithm) - _validate_not_none('content_encryption_key', content_encryption_key) - - return content_encryption_key - - -def _decrypt_message(message, encryption_data, key_encryption_key=None, resolver=None): - ''' - Decrypts the given ciphertext using AES256 in CBC mode with 128 bit padding. - Unwraps the content-encryption-key using the user-provided or resolved key-encryption-key (kek). - Returns the original plaintex. - - :param str message: - The ciphertext to be decrypted. - :param _EncryptionData encryption_data: - The metadata associated with this ciphertext. - :param object key_encryption_key: - The user-provided key-encryption-key. Must implement the following methods: - unwrap_key(key, algorithm) - - returns the unwrapped form of the specified symmetric key using the string-specified algorithm. - get_kid() - - returns a string key id for this key-encryption-key. - :param function resolver(kid): - The user-provided key resolver. Uses the kid string to return a key-encryption-key - implementing the interface defined above. - :return: The decrypted plaintext. - :rtype: str - ''' - _validate_not_none('message', message) - content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, resolver) - - if _EncryptionAlgorithm.AES_CBC_256 != encryption_data.encryption_agent.encryption_algorithm: - raise ValueError('Specified encryption algorithm is not supported.') - - cipher = _generate_AES_CBC_cipher(content_encryption_key, encryption_data.content_encryption_IV) - - # decrypt data - decrypted_data = message - decryptor = cipher.decryptor() - decrypted_data = (decryptor.update(decrypted_data) + decryptor.finalize()) - - # unpad data - unpadder = PKCS7(128).unpadder() - decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize()) - - return decrypted_data - - -def encrypt_blob(blob, key_encryption_key): - ''' - Encrypts the given blob using AES256 in CBC mode with 128 bit padding. - Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek). - Returns a json-formatted string containing the encryption metadata. This method should - only be used when a blob is small enough for single shot upload. Encrypting larger blobs - is done as a part of the upload_data_chunks method. - - :param bytes blob: - The blob to be encrypted. - :param object key_encryption_key: - The user-provided key-encryption-key. Must implement the following methods: - wrap_key(key)--wraps the specified key using an algorithm of the user's choice. - get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key. - get_kid()--returns a string key id for this key-encryption-key. - :return: A tuple of json-formatted string containing the encryption metadata and the encrypted blob data. - :rtype: (str, bytes) - ''' - - _validate_not_none('blob', blob) - _validate_not_none('key_encryption_key', key_encryption_key) - _validate_key_encryption_key_wrap(key_encryption_key) - - # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks - content_encryption_key = urandom(32) - initialization_vector = urandom(16) - - cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector) - - # PKCS7 with 16 byte blocks ensures compatibility with AES. - padder = PKCS7(128).padder() - padded_data = padder.update(blob) + padder.finalize() - - # Encrypt the data. 
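At its core, the encrypt path being deleted here is PKCS7 padding followed by AES-256-CBC, and decryption is the mirror image. A self-contained round trip using the same cryptography primitives the module imports (a sketch, not the removed implementation itself):

    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.ciphers import Cipher
    from cryptography.hazmat.primitives.ciphers.algorithms import AES
    from cryptography.hazmat.primitives.ciphers.modes import CBC
    from cryptography.hazmat.primitives.padding import PKCS7

    cek, iv = os.urandom(32), os.urandom(16)   # AES-256 key and 16-byte IV
    cipher = Cipher(AES(cek), CBC(iv), default_backend())

    padder = PKCS7(128).padder()               # 128-bit blocks, as in the module
    padded = padder.update(b"blob contents") + padder.finalize()
    encryptor = cipher.encryptor()
    ciphertext = encryptor.update(padded) + encryptor.finalize()

    decryptor = cipher.decryptor()
    unpadder = PKCS7(128).unpadder()
    plaintext = unpadder.update(decryptor.update(ciphertext) + decryptor.finalize())
    plaintext += unpadder.finalize()
    assert plaintext == b"blob contents"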
- encryptor = cipher.encryptor() - encrypted_data = encryptor.update(padded_data) + encryptor.finalize() - encryption_data = _generate_encryption_data_dict(key_encryption_key, content_encryption_key, - initialization_vector) - encryption_data['EncryptionMode'] = 'FullBlob' - - return dumps(encryption_data), encrypted_data - - -def generate_blob_encryption_data(key_encryption_key): - ''' - Generates the encryption_metadata for the blob. - - :param bytes key_encryption_key: - The key-encryption-key used to wrap the cek associate with this blob. - :return: A tuple containing the cek and iv for this blob as well as the - serialized encryption metadata for the blob. - :rtype: (bytes, bytes, str) - ''' - encryption_data = None - content_encryption_key = None - initialization_vector = None - if key_encryption_key: - _validate_key_encryption_key_wrap(key_encryption_key) - content_encryption_key = urandom(32) - initialization_vector = urandom(16) - encryption_data = _generate_encryption_data_dict(key_encryption_key, - content_encryption_key, - initialization_vector) - encryption_data['EncryptionMode'] = 'FullBlob' - encryption_data = dumps(encryption_data) - - return content_encryption_key, initialization_vector, encryption_data - - -def decrypt_blob(require_encryption, key_encryption_key, key_resolver, - content, start_offset, end_offset, response_headers): - ''' - Decrypts the given blob contents and returns only the requested range. - - :param bool require_encryption: - Whether or not the calling blob service requires objects to be decrypted. - :param object key_encryption_key: - The user-provided key-encryption-key. Must implement the following methods: - wrap_key(key)--wraps the specified key using an algorithm of the user's choice. - get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key. - get_kid()--returns a string key id for this key-encryption-key. - :param key_resolver(kid): - The user-provided key resolver. Uses the kid string to return a key-encryption-key - implementing the interface defined above. - :return: The decrypted blob content. - :rtype: bytes - ''' - try: - encryption_data = _dict_to_encryption_data(loads(response_headers['x-ms-meta-encryptiondata'])) - except: # pylint: disable=bare-except - if require_encryption: - raise ValueError( - 'Encryption required, but received data does not contain appropriate metatadata.' 
+ \ - 'Data was either not encrypted or metadata has been lost.') - - return content - - if encryption_data.encryption_agent.encryption_algorithm != _EncryptionAlgorithm.AES_CBC_256: - raise ValueError('Specified encryption algorithm is not supported.') - - blob_type = response_headers['x-ms-blob-type'] - - iv = None - unpad = False - if 'content-range' in response_headers: - content_range = response_headers['content-range'] - # Format: 'bytes x-y/size' - - # Ignore the word 'bytes' - content_range = content_range.split(' ') - - content_range = content_range[1].split('-') - content_range = content_range[1].split('/') - end_range = int(content_range[0]) - blob_size = int(content_range[1]) - - if start_offset >= 16: - iv = content[:16] - content = content[16:] - start_offset -= 16 - else: - iv = encryption_data.content_encryption_IV - - if end_range == blob_size - 1: - unpad = True - else: - unpad = True - iv = encryption_data.content_encryption_IV - - if blob_type == 'PageBlob': - unpad = False - - content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, key_resolver) - cipher = _generate_AES_CBC_cipher(content_encryption_key, iv) - decryptor = cipher.decryptor() - - content = decryptor.update(content) + decryptor.finalize() - if unpad: - unpadder = PKCS7(128).unpadder() - content = unpadder.update(content) + unpadder.finalize() - - return content[start_offset: len(content) - end_offset] - - -def get_blob_encryptor_and_padder(cek, iv, should_pad): - encryptor = None - padder = None - - if cek is not None and iv is not None: - cipher = _generate_AES_CBC_cipher(cek, iv) - encryptor = cipher.encryptor() - padder = PKCS7(128).padder() if should_pad else None - - return encryptor, padder - - -def encrypt_queue_message(message, key_encryption_key): - ''' - Encrypts the given plain text message using AES256 in CBC mode with 128 bit padding. - Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek). - Returns a json-formatted string containing the encrypted message and the encryption metadata. - - :param object message: - The plain text message to be encrypted. - :param object key_encryption_key: - The user-provided key-encryption-key. Must implement the following methods: - wrap_key(key)--wraps the specified key using an algorithm of the user's choice. - get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key. - get_kid()--returns a string key id for this key-encryption-key. - :return: A json-formatted string containing the encrypted message and the encryption metadata. - :rtype: str - ''' - - _validate_not_none('message', message) - _validate_not_none('key_encryption_key', key_encryption_key) - _validate_key_encryption_key_wrap(key_encryption_key) - - # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks - content_encryption_key = os.urandom(32) - initialization_vector = os.urandom(16) - - # Queue encoding functions all return unicode strings, and encryption should - # operate on binary strings. - message = message.encode('utf-8') - - cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector) - - # PKCS7 with 16 byte blocks ensures compatibility with AES. - padder = PKCS7(128).padder() - padded_data = padder.update(message) + padder.finalize() - - # Encrypt the data. - encryptor = cipher.encryptor() - encrypted_data = encryptor.update(padded_data) + encryptor.finalize() - - # Build the dictionary structure. 
- queue_message = {'EncryptedMessageContents': encode_base64(encrypted_data), - 'EncryptionData': _generate_encryption_data_dict(key_encryption_key, - content_encryption_key, - initialization_vector)} - - return dumps(queue_message) - - -def decrypt_queue_message(message, response, require_encryption, key_encryption_key, resolver): - ''' - Returns the decrypted message contents from an EncryptedQueueMessage. - If no encryption metadata is present, will return the unaltered message. - :param str message: - The JSON formatted QueueEncryptedMessage contents with all associated metadata. - :param bool require_encryption: - If set, will enforce that the retrieved messages are encrypted and decrypt them. - :param object key_encryption_key: - The user-provided key-encryption-key. Must implement the following methods: - unwrap_key(key, algorithm) - - returns the unwrapped form of the specified symmetric key usingthe string-specified algorithm. - get_kid() - - returns a string key id for this key-encryption-key. - :param function resolver(kid): - The user-provided key resolver. Uses the kid string to return a key-encryption-key - implementing the interface defined above. - :return: The plain text message from the queue message. - :rtype: str - ''' - - try: - message = loads(message) - - encryption_data = _dict_to_encryption_data(message['EncryptionData']) - decoded_data = decode_base64_to_bytes(message['EncryptedMessageContents']) - except (KeyError, ValueError): - # Message was not json formatted and so was not encrypted - # or the user provided a json formatted message. - if require_encryption: - raise ValueError('Message was not encrypted.') - - return message - try: - return _decrypt_message(decoded_data, encryption_data, key_encryption_key, resolver).decode('utf-8') - except Exception as error: - raise HttpResponseError( - message="Decryption failed.", - response=response, - error=error) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/models.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/models.py index c51356bd885f..d78cd9113133 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/models.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/models.py @@ -5,6 +5,11 @@ # -------------------------------------------------------------------------- # pylint: disable=too-many-instance-attributes from enum import Enum +from typing import Optional + +from azure.core import CaseInsensitiveEnumMeta +from azure.core.configuration import Configuration +from azure.core.pipeline.policies import UserAgentPolicy def get_enum_value(value): @@ -16,170 +21,177 @@ def get_enum_value(value): return value -class StorageErrorCode(str, Enum): +class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): # Generic storage values - account_already_exists = "AccountAlreadyExists" - account_being_created = "AccountBeingCreated" - account_is_disabled = "AccountIsDisabled" - authentication_failed = "AuthenticationFailed" - authorization_failure = "AuthorizationFailure" - no_authentication_information = "NoAuthenticationInformation" - condition_headers_not_supported = "ConditionHeadersNotSupported" - condition_not_met = "ConditionNotMet" - empty_metadata_key = "EmptyMetadataKey" - 
insufficient_account_permissions = "InsufficientAccountPermissions" - internal_error = "InternalError" - invalid_authentication_info = "InvalidAuthenticationInfo" - invalid_header_value = "InvalidHeaderValue" - invalid_http_verb = "InvalidHttpVerb" - invalid_input = "InvalidInput" - invalid_md5 = "InvalidMd5" - invalid_metadata = "InvalidMetadata" - invalid_query_parameter_value = "InvalidQueryParameterValue" - invalid_range = "InvalidRange" - invalid_resource_name = "InvalidResourceName" - invalid_uri = "InvalidUri" - invalid_xml_document = "InvalidXmlDocument" - invalid_xml_node_value = "InvalidXmlNodeValue" - md5_mismatch = "Md5Mismatch" - metadata_too_large = "MetadataTooLarge" - missing_content_length_header = "MissingContentLengthHeader" - missing_required_query_parameter = "MissingRequiredQueryParameter" - missing_required_header = "MissingRequiredHeader" - missing_required_xml_node = "MissingRequiredXmlNode" - multiple_condition_headers_not_supported = "MultipleConditionHeadersNotSupported" - operation_timed_out = "OperationTimedOut" - out_of_range_input = "OutOfRangeInput" - out_of_range_query_parameter_value = "OutOfRangeQueryParameterValue" - request_body_too_large = "RequestBodyTooLarge" - resource_type_mismatch = "ResourceTypeMismatch" - request_url_failed_to_parse = "RequestUrlFailedToParse" - resource_already_exists = "ResourceAlreadyExists" - resource_not_found = "ResourceNotFound" - server_busy = "ServerBusy" - unsupported_header = "UnsupportedHeader" - unsupported_xml_node = "UnsupportedXmlNode" - unsupported_query_parameter = "UnsupportedQueryParameter" - unsupported_http_verb = "UnsupportedHttpVerb" + ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" + ACCOUNT_BEING_CREATED = "AccountBeingCreated" + ACCOUNT_IS_DISABLED = "AccountIsDisabled" + AUTHENTICATION_FAILED = "AuthenticationFailed" + AUTHORIZATION_FAILURE = "AuthorizationFailure" + NO_AUTHENTICATION_INFORMATION = "NoAuthenticationInformation" + CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported" + CONDITION_NOT_MET = "ConditionNotMet" + EMPTY_METADATA_KEY = "EmptyMetadataKey" + INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions" + INTERNAL_ERROR = "InternalError" + INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo" + INVALID_HEADER_VALUE = "InvalidHeaderValue" + INVALID_HTTP_VERB = "InvalidHttpVerb" + INVALID_INPUT = "InvalidInput" + INVALID_MD5 = "InvalidMd5" + INVALID_METADATA = "InvalidMetadata" + INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue" + INVALID_RANGE = "InvalidRange" + INVALID_RESOURCE_NAME = "InvalidResourceName" + INVALID_URI = "InvalidUri" + INVALID_XML_DOCUMENT = "InvalidXmlDocument" + INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue" + MD5_MISMATCH = "Md5Mismatch" + METADATA_TOO_LARGE = "MetadataTooLarge" + MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader" + MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter" + MISSING_REQUIRED_HEADER = "MissingRequiredHeader" + MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode" + MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported" + OPERATION_TIMED_OUT = "OperationTimedOut" + OUT_OF_RANGE_INPUT = "OutOfRangeInput" + OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue" + REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge" + RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch" + REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse" + RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists" + RESOURCE_NOT_FOUND = "ResourceNotFound" + SERVER_BUSY = "ServerBusy" + 
UNSUPPORTED_HEADER = "UnsupportedHeader" + UNSUPPORTED_XML_NODE = "UnsupportedXmlNode" + UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter" + UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb" # Blob values - append_position_condition_not_met = "AppendPositionConditionNotMet" - blob_already_exists = "BlobAlreadyExists" - blob_not_found = "BlobNotFound" - blob_overwritten = "BlobOverwritten" - blob_tier_inadequate_for_content_length = "BlobTierInadequateForContentLength" - block_count_exceeds_limit = "BlockCountExceedsLimit" - block_list_too_long = "BlockListTooLong" - cannot_change_to_lower_tier = "CannotChangeToLowerTier" - cannot_verify_copy_source = "CannotVerifyCopySource" - container_already_exists = "ContainerAlreadyExists" - container_being_deleted = "ContainerBeingDeleted" - container_disabled = "ContainerDisabled" - container_not_found = "ContainerNotFound" - content_length_larger_than_tier_limit = "ContentLengthLargerThanTierLimit" - copy_across_accounts_not_supported = "CopyAcrossAccountsNotSupported" - copy_id_mismatch = "CopyIdMismatch" - feature_version_mismatch = "FeatureVersionMismatch" - incremental_copy_blob_mismatch = "IncrementalCopyBlobMismatch" - incremental_copy_of_eralier_version_snapshot_not_allowed = "IncrementalCopyOfEralierVersionSnapshotNotAllowed" - incremental_copy_source_must_be_snapshot = "IncrementalCopySourceMustBeSnapshot" - infinite_lease_duration_required = "InfiniteLeaseDurationRequired" - invalid_blob_or_block = "InvalidBlobOrBlock" - invalid_blob_tier = "InvalidBlobTier" - invalid_blob_type = "InvalidBlobType" - invalid_block_id = "InvalidBlockId" - invalid_block_list = "InvalidBlockList" - invalid_operation = "InvalidOperation" - invalid_page_range = "InvalidPageRange" - invalid_source_blob_type = "InvalidSourceBlobType" - invalid_source_blob_url = "InvalidSourceBlobUrl" - invalid_version_for_page_blob_operation = "InvalidVersionForPageBlobOperation" - lease_already_present = "LeaseAlreadyPresent" - lease_already_broken = "LeaseAlreadyBroken" - lease_id_mismatch_with_blob_operation = "LeaseIdMismatchWithBlobOperation" - lease_id_mismatch_with_container_operation = "LeaseIdMismatchWithContainerOperation" - lease_id_mismatch_with_lease_operation = "LeaseIdMismatchWithLeaseOperation" - lease_id_missing = "LeaseIdMissing" - lease_is_breaking_and_cannot_be_acquired = "LeaseIsBreakingAndCannotBeAcquired" - lease_is_breaking_and_cannot_be_changed = "LeaseIsBreakingAndCannotBeChanged" - lease_is_broken_and_cannot_be_renewed = "LeaseIsBrokenAndCannotBeRenewed" - lease_lost = "LeaseLost" - lease_not_present_with_blob_operation = "LeaseNotPresentWithBlobOperation" - lease_not_present_with_container_operation = "LeaseNotPresentWithContainerOperation" - lease_not_present_with_lease_operation = "LeaseNotPresentWithLeaseOperation" - max_blob_size_condition_not_met = "MaxBlobSizeConditionNotMet" - no_pending_copy_operation = "NoPendingCopyOperation" - operation_not_allowed_on_incremental_copy_blob = "OperationNotAllowedOnIncrementalCopyBlob" - pending_copy_operation = "PendingCopyOperation" - previous_snapshot_cannot_be_newer = "PreviousSnapshotCannotBeNewer" - previous_snapshot_not_found = "PreviousSnapshotNotFound" - previous_snapshot_operation_not_supported = "PreviousSnapshotOperationNotSupported" - sequence_number_condition_not_met = "SequenceNumberConditionNotMet" - sequence_number_increment_too_large = "SequenceNumberIncrementTooLarge" - snapshot_count_exceeded = "SnapshotCountExceeded" - snaphot_operation_rate_exceeded = "SnaphotOperationRateExceeded" - 
snapshots_present = "SnapshotsPresent" - source_condition_not_met = "SourceConditionNotMet" - system_in_use = "SystemInUse" - target_condition_not_met = "TargetConditionNotMet" - unauthorized_blob_overwrite = "UnauthorizedBlobOverwrite" - blob_being_rehydrated = "BlobBeingRehydrated" - blob_archived = "BlobArchived" - blob_not_archived = "BlobNotArchived" + APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet" + BLOB_ACCESS_TIER_NOT_SUPPORTED_FOR_ACCOUNT_TYPE = "BlobAccessTierNotSupportedForAccountType" + BLOB_ALREADY_EXISTS = "BlobAlreadyExists" + BLOB_NOT_FOUND = "BlobNotFound" + BLOB_OVERWRITTEN = "BlobOverwritten" + BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" + BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" + BLOCK_LIST_TOO_LONG = "BlockListTooLong" + CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" + CANNOT_VERIFY_COPY_SOURCE = "CannotVerifyCopySource" + CONTAINER_ALREADY_EXISTS = "ContainerAlreadyExists" + CONTAINER_BEING_DELETED = "ContainerBeingDeleted" + CONTAINER_DISABLED = "ContainerDisabled" + CONTAINER_NOT_FOUND = "ContainerNotFound" + CONTENT_LENGTH_LARGER_THAN_TIER_LIMIT = "ContentLengthLargerThanTierLimit" + COPY_ACROSS_ACCOUNTS_NOT_SUPPORTED = "CopyAcrossAccountsNotSupported" + COPY_ID_MISMATCH = "CopyIdMismatch" + FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" + INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" + #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED instead. + INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" + INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" + INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" + INVALID_BLOB_TIER = "InvalidBlobTier" + INVALID_BLOB_TYPE = "InvalidBlobType" + INVALID_BLOCK_ID = "InvalidBlockId" + INVALID_BLOCK_LIST = "InvalidBlockList" + INVALID_OPERATION = "InvalidOperation" + INVALID_PAGE_RANGE = "InvalidPageRange" + INVALID_SOURCE_BLOB_TYPE = "InvalidSourceBlobType" + INVALID_SOURCE_BLOB_URL = "InvalidSourceBlobUrl" + INVALID_VERSION_FOR_PAGE_BLOB_OPERATION = "InvalidVersionForPageBlobOperation" + LEASE_ALREADY_PRESENT = "LeaseAlreadyPresent" + LEASE_ALREADY_BROKEN = "LeaseAlreadyBroken" + LEASE_ID_MISMATCH_WITH_BLOB_OPERATION = "LeaseIdMismatchWithBlobOperation" + LEASE_ID_MISMATCH_WITH_CONTAINER_OPERATION = "LeaseIdMismatchWithContainerOperation" + LEASE_ID_MISMATCH_WITH_LEASE_OPERATION = "LeaseIdMismatchWithLeaseOperation" + LEASE_ID_MISSING = "LeaseIdMissing" + LEASE_IS_BREAKING_AND_CANNOT_BE_ACQUIRED = "LeaseIsBreakingAndCannotBeAcquired" + LEASE_IS_BREAKING_AND_CANNOT_BE_CHANGED = "LeaseIsBreakingAndCannotBeChanged" + LEASE_IS_BROKEN_AND_CANNOT_BE_RENEWED = "LeaseIsBrokenAndCannotBeRenewed" + LEASE_LOST = "LeaseLost" + LEASE_NOT_PRESENT_WITH_BLOB_OPERATION = "LeaseNotPresentWithBlobOperation" + LEASE_NOT_PRESENT_WITH_CONTAINER_OPERATION = "LeaseNotPresentWithContainerOperation" + LEASE_NOT_PRESENT_WITH_LEASE_OPERATION = "LeaseNotPresentWithLeaseOperation" + MAX_BLOB_SIZE_CONDITION_NOT_MET = "MaxBlobSizeConditionNotMet" + NO_PENDING_COPY_OPERATION = "NoPendingCopyOperation" + OPERATION_NOT_ALLOWED_ON_INCREMENTAL_COPY_BLOB = "OperationNotAllowedOnIncrementalCopyBlob" + PENDING_COPY_OPERATION = "PendingCopyOperation" + 
PREVIOUS_SNAPSHOT_CANNOT_BE_NEWER = "PreviousSnapshotCannotBeNewer" + PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound" + PREVIOUS_SNAPSHOT_OPERATION_NOT_SUPPORTED = "PreviousSnapshotOperationNotSupported" + SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" + SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" + SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" + SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" + #: Deprecated: Please use SNAPSHOT_OPERATION_RATE_EXCEEDED instead. + SNAPHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" + SNAPSHOTS_PRESENT = "SnapshotsPresent" + SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" + SYSTEM_IN_USE = "SystemInUse" + TARGET_CONDITION_NOT_MET = "TargetConditionNotMet" + UNAUTHORIZED_BLOB_OVERWRITE = "UnauthorizedBlobOverwrite" + BLOB_BEING_REHYDRATED = "BlobBeingRehydrated" + BLOB_ARCHIVED = "BlobArchived" + BLOB_NOT_ARCHIVED = "BlobNotArchived" # Queue values - invalid_marker = "InvalidMarker" - message_not_found = "MessageNotFound" - message_too_large = "MessageTooLarge" - pop_receipt_mismatch = "PopReceiptMismatch" - queue_already_exists = "QueueAlreadyExists" - queue_being_deleted = "QueueBeingDeleted" - queue_disabled = "QueueDisabled" - queue_not_empty = "QueueNotEmpty" - queue_not_found = "QueueNotFound" + INVALID_MARKER = "InvalidMarker" + MESSAGE_NOT_FOUND = "MessageNotFound" + MESSAGE_TOO_LARGE = "MessageTooLarge" + POP_RECEIPT_MISMATCH = "PopReceiptMismatch" + QUEUE_ALREADY_EXISTS = "QueueAlreadyExists" + QUEUE_BEING_DELETED = "QueueBeingDeleted" + QUEUE_DISABLED = "QueueDisabled" + QUEUE_NOT_EMPTY = "QueueNotEmpty" + QUEUE_NOT_FOUND = "QueueNotFound" # File values - cannot_delete_file_or_directory = "CannotDeleteFileOrDirectory" - client_cache_flush_delay = "ClientCacheFlushDelay" - delete_pending = "DeletePending" - directory_not_empty = "DirectoryNotEmpty" - file_lock_conflict = "FileLockConflict" - invalid_file_or_directory_path_name = "InvalidFileOrDirectoryPathName" - parent_not_found = "ParentNotFound" - read_only_attribute = "ReadOnlyAttribute" - share_already_exists = "ShareAlreadyExists" - share_being_deleted = "ShareBeingDeleted" - share_disabled = "ShareDisabled" - share_not_found = "ShareNotFound" - sharing_violation = "SharingViolation" - share_snapshot_in_progress = "ShareSnapshotInProgress" - share_snapshot_count_exceeded = "ShareSnapshotCountExceeded" - share_snapshot_operation_not_supported = "ShareSnapshotOperationNotSupported" - share_has_snapshots = "ShareHasSnapshots" - container_quota_downgrade_not_allowed = "ContainerQuotaDowngradeNotAllowed" + CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" + CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" + DELETE_PENDING = "DeletePending" + DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" + FILE_LOCK_CONFLICT = "FileLockConflict" + FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed" + INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" + PARENT_NOT_FOUND = "ParentNotFound" + READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" + SHARE_ALREADY_EXISTS = "ShareAlreadyExists" + SHARE_BEING_DELETED = "ShareBeingDeleted" + SHARE_DISABLED = "ShareDisabled" + SHARE_NOT_FOUND = "ShareNotFound" + SHARING_VIOLATION = "SharingViolation" + SHARE_SNAPSHOT_IN_PROGRESS = "ShareSnapshotInProgress" + SHARE_SNAPSHOT_COUNT_EXCEEDED = 
"ShareSnapshotCountExceeded" + SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" + SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" + CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" # DataLake values - content_length_must_be_zero = 'ContentLengthMustBeZero' - path_already_exists = 'PathAlreadyExists' - invalid_flush_position = 'InvalidFlushPosition' - invalid_property_name = 'InvalidPropertyName' - invalid_source_uri = 'InvalidSourceUri' - unsupported_rest_version = 'UnsupportedRestVersion' - file_system_not_found = 'FilesystemNotFound' - path_not_found = 'PathNotFound' - rename_destination_parent_path_not_found = 'RenameDestinationParentPathNotFound' - source_path_not_found = 'SourcePathNotFound' - destination_path_is_being_deleted = 'DestinationPathIsBeingDeleted' - file_system_already_exists = 'FilesystemAlreadyExists' - file_system_being_deleted = 'FilesystemBeingDeleted' - invalid_destination_path = 'InvalidDestinationPath' - invalid_rename_source_path = 'InvalidRenameSourcePath' - invalid_source_or_destination_resource_type = 'InvalidSourceOrDestinationResourceType' - lease_is_already_broken = 'LeaseIsAlreadyBroken' - lease_name_mismatch = 'LeaseNameMismatch' - path_conflict = 'PathConflict' - source_path_is_being_deleted = 'SourcePathIsBeingDeleted' + CONTENT_LENGTH_MUST_BE_ZERO = 'ContentLengthMustBeZero' + PATH_ALREADY_EXISTS = 'PathAlreadyExists' + INVALID_FLUSH_POSITION = 'InvalidFlushPosition' + INVALID_PROPERTY_NAME = 'InvalidPropertyName' + INVALID_SOURCE_URI = 'InvalidSourceUri' + UNSUPPORTED_REST_VERSION = 'UnsupportedRestVersion' + FILE_SYSTEM_NOT_FOUND = 'FilesystemNotFound' + PATH_NOT_FOUND = 'PathNotFound' + RENAME_DESTINATION_PARENT_PATH_NOT_FOUND = 'RenameDestinationParentPathNotFound' + SOURCE_PATH_NOT_FOUND = 'SourcePathNotFound' + DESTINATION_PATH_IS_BEING_DELETED = 'DestinationPathIsBeingDeleted' + FILE_SYSTEM_ALREADY_EXISTS = 'FilesystemAlreadyExists' + FILE_SYSTEM_BEING_DELETED = 'FilesystemBeingDeleted' + INVALID_DESTINATION_PATH = 'InvalidDestinationPath' + INVALID_RENAME_SOURCE_PATH = 'InvalidRenameSourcePath' + INVALID_SOURCE_OR_DESTINATION_RESOURCE_TYPE = 'InvalidSourceOrDestinationResourceType' + LEASE_IS_ALREADY_BROKEN = 'LeaseIsAlreadyBroken' + LEASE_NAME_MISMATCH = 'LeaseNameMismatch' + PATH_CONFLICT = 'PathConflict' + SOURCE_PATH_IS_BEING_DELETED = 'SourcePathIsBeingDeleted' class DictMixin(object): @@ -199,19 +211,22 @@ def __len__(self): def __delitem__(self, key): self.__dict__[key] = None + # Compare objects by comparing all attributes. def __eq__(self, other): - """Compare objects by comparing all attributes.""" if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False + # Compare objects by comparing all attributes. def __ne__(self, other): - """Compare objects by comparing all attributes.""" return not self.__eq__(other) def __str__(self): return str({k: v for k, v in self.__dict__.items() if not k.startswith('_')}) + def __contains__(self, key): + return key in self.__dict__ + def has_key(self, k): return k in self.__dict__ @@ -260,7 +275,17 @@ class ResourceTypes(object): files(e.g. Put Blob, Query Entity, Get Messages, Create File, etc.) 
""" - def __init__(self, service=False, container=False, object=False): # pylint: disable=redefined-builtin + service: bool = False + container: bool = False + object: bool = False + _str: str + + def __init__( + self, + service: bool = False, + container: bool = False, + object: bool = False # pylint: disable=redefined-builtin + ) -> None: self.service = service self.container = container self.object = object @@ -289,7 +314,7 @@ def from_string(cls, string): res_object = 'o' in string parsed = cls(res_service, res_container, res_object) - parsed._str = string # pylint: disable = protected-access + parsed._str = string return parsed @@ -328,14 +353,46 @@ class AccountSasPermissions(object): To enable set or get tags on the blobs in the container. :keyword bool filter_by_tags: To enable get blobs by tags, this should be used together with list permission. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + Valid for Object resource type of Blob only. """ - def __init__(self, read=False, write=False, delete=False, - list=False, # pylint: disable=redefined-builtin - add=False, create=False, update=False, process=False, delete_previous_version=False, **kwargs): + + read: bool = False + write: bool = False + delete: bool = False + delete_previous_version: bool = False + list: bool = False + add: bool = False + create: bool = False + update: bool = False + process: bool = False + tag: bool = False + filter_by_tags: bool = False + set_immutability_policy: bool = False + permanent_delete: bool = False + + def __init__( + self, + read: bool = False, + write: bool = False, + delete: bool = False, + list: bool = False, # pylint: disable=redefined-builtin + add: bool = False, + create: bool = False, + update: bool = False, + process: bool = False, + delete_previous_version: bool = False, + **kwargs + ) -> None: self.read = read self.write = write self.delete = delete self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) self.list = list self.add = add self.create = create @@ -343,17 +400,20 @@ def __init__(self, read=False, write=False, delete=False, self.process = process self.tag = kwargs.pop('tag', False) self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + ('l' if self.list else '') + ('a' if self.add else '') + ('c' if self.create else '') + ('u' if self.update else '') + ('p' if self.process else '') + ('f' if self.filter_by_tags else '') + - ('t' if self.tag else '') + ('t' if self.tag else '') + + ('i' if self.set_immutability_policy else '') ) def __str__(self): @@ -376,6 +436,7 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission p_list = 'l' in permission p_add = 'a' in permission p_create = 'c' in permission @@ -383,24 +444,32 @@ def from_string(cls, permission): p_process = 'p' in permission p_tag = 't' in permission p_filter_by_tags = 'f' in permission + p_set_immutability_policy = 'i' in permission parsed = 
cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, - filter_by_tags=p_filter_by_tags) + filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, + permanent_delete=p_permanent_delete) return parsed + class Services(object): """Specifies the services accessible with the account SAS. - :param bool blob: - Access for the `~azure.storage.blob.BlobServiceClient` - :param bool queue: - Access for the `~azure.storage.queue.QueueServiceClient` - :param bool fileshare: - Access for the `~azure.storage.fileshare.ShareServiceClient` + :keyword bool blob: + Access for the `~azure.storage.blob.BlobServiceClient`. Default is False. + :keyword bool queue: + Access for the `~azure.storage.queue.QueueServiceClient`. Default is False. + :keyword bool fileshare: + Access for the `~azure.storage.fileshare.ShareServiceClient`. Default is False. """ - def __init__(self, blob=False, queue=False, fileshare=False): + def __init__( + self, *, + blob: bool = False, + queue: bool = False, + fileshare: bool = False + ) -> None: self.blob = blob self.queue = queue self.fileshare = fileshare @@ -428,8 +497,8 @@ def from_string(cls, string): res_queue = 'q' in string res_file = 'f' in string - parsed = cls(res_blob, res_queue, res_file) - parsed._str = string # pylint: disable = protected-access + parsed = cls(blob=res_blob, queue=res_queue, fileshare=res_file) + parsed._str = string return parsed @@ -440,22 +509,23 @@ class UserDelegationKey(object): The fields are saved as simple strings since the user does not have to interact with this object; to generate an identify SAS, the user can simply pass it to the right API. - - :ivar str signed_oid: - Object ID of this token. - :ivar str signed_tid: - Tenant ID of the tenant that issued this token. - :ivar str signed_start: - The datetime this token becomes valid. - :ivar str signed_expiry: - The datetime this token expires. - :ivar str signed_service: - What service this key is valid for. - :ivar str signed_version: - The version identifier of the REST service that created this token. - :ivar str value: - The user delegation key. """ + + signed_oid: Optional[str] = None + """Object ID of this token.""" + signed_tid: Optional[str] = None + """Tenant ID of the tenant that issued this token.""" + signed_start: Optional[str] = None + """The datetime this token becomes valid.""" + signed_expiry: Optional[str] = None + """The datetime this token expires.""" + signed_service: Optional[str] = None + """What service this key is valid for.""" + signed_version: Optional[str] = None + """The version identifier of the REST service that created this token.""" + value: Optional[str] = None + """The user delegation key.""" + def __init__(self): self.signed_oid = None self.signed_tid = None @@ -464,3 +534,52 @@ def __init__(self): self.signed_service = None self.signed_version = None self.value = None + + +class StorageConfiguration(Configuration): + """ + Specifies the configurable values used in Azure Storage. + + :param int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :param int copy_polling_interval: The interval in seconds for polling copy operations. 
+ :param int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :param int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. + :param bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :param int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :param int min_large_chunk_upload_threshold: The max size for a single put operation. + :param int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :param int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + :param int max_range_size: The max range size for file upload. + + """ + + max_single_put_size: int + copy_polling_interval: int + max_block_size: int + min_large_block_upload_threshold: int + use_byte_buffer: bool + max_page_size: int + min_large_chunk_upload_threshold: int + max_single_get_size: int + max_chunk_get_size: int + max_range_size: int + user_agent_policy: UserAgentPolicy + + def __init__(self, **kwargs): + super(StorageConfiguration, self).__init__(**kwargs) + self.max_single_put_size = kwargs.pop('max_single_put_size', 64 * 1024 * 1024) + self.copy_polling_interval = 15 + self.max_block_size = kwargs.pop('max_block_size', 4 * 1024 * 1024) + self.min_large_block_upload_threshold = kwargs.get('min_large_block_upload_threshold', 4 * 1024 * 1024 + 1) + self.use_byte_buffer = kwargs.pop('use_byte_buffer', False) + self.max_page_size = kwargs.pop('max_page_size', 4 * 1024 * 1024) + self.min_large_chunk_upload_threshold = kwargs.pop('min_large_chunk_upload_threshold', 100 * 1024 * 1024 + 1) + self.max_single_get_size = kwargs.pop('max_single_get_size', 32 * 1024 * 1024) + self.max_chunk_get_size = kwargs.pop('max_chunk_get_size', 4 * 1024 * 1024) + self.max_range_size = kwargs.pop('max_range_size', 4 * 1024 * 1024) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/parser.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/parser.py index c6feba8a6393..112c1984f4fb 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/parser.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/parser.py @@ -4,17 +4,50 @@ # license information. # -------------------------------------------------------------------------- -import sys +from datetime import datetime, timezone +from typing import Optional -if sys.version_info < (3,): - def _str(value): - if isinstance(value, unicode): # pylint: disable=undefined-variable - return value.encode('utf-8') +EPOCH_AS_FILETIME = 116444736000000000 # January 1, 1970 as MS filetime +HUNDREDS_OF_NANOSECONDS = 10000000 - return str(value) -else: - _str = str - -def _to_utc_datetime(value): +def _to_utc_datetime(value: datetime) -> str: return value.strftime('%Y-%m-%dT%H:%M:%SZ') + + +def _rfc_1123_to_datetime(rfc_1123: str) -> Optional[datetime]: + """Converts an RFC 1123 date string to a UTC datetime. + + :param str rfc_1123: The time and date in RFC 1123 format. 
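# Aside: a self-contained sketch of the MS filetime conversion performed by the new
# parser helpers (constants copied from above; no vendored imports required).
from datetime import datetime, timezone

EPOCH_AS_FILETIME = 116444736000000000  # January 1, 1970 as MS filetime
HUNDREDS_OF_NANOSECONDS = 10000000

ticks = EPOCH_AS_FILETIME + 10 * HUNDREDS_OF_NANOSECONDS  # 10 seconds past the Unix epoch
dt = datetime.fromtimestamp((ticks - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, tz=timezone.utc)
assert dt == datetime(1970, 1, 1, 0, 0, 10, tzinfo=timezone.utc)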
+ :returns: The time and date in UTC datetime format. + :rtype: datetime + """ + if not rfc_1123: + return None + + return datetime.strptime(rfc_1123, "%a, %d %b %Y %H:%M:%S %Z") + + +def _filetime_to_datetime(filetime: str) -> Optional[datetime]: + """Converts an MS filetime string to a UTC datetime. "0" indicates None. + If parsing MS Filetime fails, tries RFC 1123 as backup. + + :param str filetime: The time and date in MS filetime format. + :returns: The time and date in UTC datetime format. + :rtype: datetime + """ + if not filetime: + return None + + # Try to convert to MS Filetime + try: + temp_filetime = int(filetime) + if temp_filetime == 0: + return None + + return datetime.fromtimestamp((temp_filetime - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, tz=timezone.utc) + except ValueError: + pass + + # Try RFC 1123 as backup + return _rfc_1123_to_datetime(filetime) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies.py index 377007f5ff2b..ee75cd5a466c 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies.py @@ -6,48 +6,55 @@ import base64 import hashlib -import re -import random -from time import time -from io import SEEK_SET, UnsupportedOperation import logging +import random +import re import uuid -import types -from typing import Any, TYPE_CHECKING +from io import SEEK_SET, UnsupportedOperation +from time import time +from typing import Any, Dict, Optional, TYPE_CHECKING from urllib.parse import ( - urlparse, parse_qsl, - urlunparse, urlencode, + urlparse, + urlunparse, ) from wsgiref.handlers import format_date_time +from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError from azure.core.pipeline.policies import ( + BearerTokenCredentialPolicy, HeadersPolicy, - SansIOHTTPPolicy, - NetworkTraceLoggingPolicy, HTTPPolicy, - RequestHistory + NetworkTraceLoggingPolicy, + RequestHistory, + SansIOHTTPPolicy ) -from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError +from .authentication import AzureSigningError, StorageHttpChallenge +from .constants import DEFAULT_OAUTH_SCOPE from .models import LocationMode - if TYPE_CHECKING: - from azure.core.pipeline import PipelineRequest, PipelineResponse + from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import + PipelineRequest, + PipelineResponse + ) _LOGGER = logging.getLogger(__name__) def encode_base64(data): + if isinstance(data, str): + data = data.encode('utf-8') encoded = base64.b64encode(data) return encoded.decode('utf-8') +# Are we out of retries? def is_exhausted(settings): - """Are we out of retries?""" retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status']) retry_counts = list(filter(None, retry_counts)) if not retry_counts: @@ -60,13 +67,12 @@ def retry_hook(settings, **kwargs): settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs) +# Is this method/status code retryable? 
(Based on allowlists and control +# variables such as the number of total retries to allow, whether to +# respect the Retry-After header, whether this header is present, and +# whether the returned status code is on the list of status codes to +# be retried upon on the presence of the aforementioned header) def is_retry(response, mode): - """Is this method/status code retryable? (Based on allowlists and control - variables such as the number of total retries to allow, whether to - respect the Retry-After header, whether this header is present, and - whether the returned status code is on the list of status codes to - be retried upon on the presence of the aforementioned header) - """ status = response.http_response.status_code if 300 <= status < 500: # An exception occurred, but in most cases it was expected. Examples could @@ -87,6 +93,16 @@ def is_retry(response, mode): return False +def is_checksum_retry(response): + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + def urljoin(base_url, stub_url): parsed = urlparse(base_url) parsed = parsed._replace(path=parsed.path + '/' + stub_url) @@ -106,8 +122,7 @@ def on_request(self, request): class StorageHeadersPolicy(HeadersPolicy): request_id_header_name = 'x-ms-client-request-id' - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def on_request(self, request: "PipelineRequest") -> None: super(StorageHeadersPolicy, self).on_request(request) current_time = format_date_time(time()) request.http_request.headers['x-ms-date'] = current_time @@ -137,8 +152,7 @@ def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument self.hosts = hosts super(StorageHosts, self).__init__() - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def on_request(self, request: "PipelineRequest") -> None: request.context.options['hosts'] = self.hosts parsed_url = urlparse(request.http_request.url) @@ -154,7 +168,7 @@ def on_request(self, request): # Lock retries to the specific location request.context.options['retry_to_secondary'] = False if use_location not in self.hosts: - raise ValueError("Attempting to use undefined host location {}".format(use_location)) + raise ValueError(f"Attempting to use undefined host location {use_location}") if use_location != location_mode: # Update request URL to use the specified location updated = parsed_url._replace(netloc=self.hosts[use_location]) @@ -170,10 +184,14 @@ class StorageLoggingPolicy(NetworkTraceLoggingPolicy): This accepts both global configuration, and per-request level with "enable_http_logger" """ - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def __init__(self, logging_enable: bool = False, **kwargs) -> None: + self.logging_body = kwargs.pop("logging_body", False) + super(StorageLoggingPolicy, self).__init__(logging_enable=logging_enable, **kwargs) + + def on_request(self, request: "PipelineRequest") -> None: http_request = request.http_request options = request.context.options + self.logging_body = self.logging_body or options.pop("logging_body", False) if options.pop("logging_enable", self.enable_http_logger): request.context["logging_enable"] = True if not 
_LOGGER.isEnabledFor(logging.DEBUG): @@ -202,16 +220,15 @@ def on_request(self, request): _LOGGER.debug(" %r: %r", header, value) _LOGGER.debug("Request body:") - # We don't want to log the binary data of a file upload. - if isinstance(http_request.body, types.GeneratorType): - _LOGGER.debug("File upload") - else: + if self.logging_body: _LOGGER.debug(str(http_request.body)) + else: + # We don't want to log the binary data of a file upload. + _LOGGER.debug("Hidden body, please use logging_body to show body") except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log request: %r", err) - def on_response(self, request, response): - # type: (PipelineRequest, PipelineResponse, Any) -> None + def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None: if response.context.pop("logging_enable", self.enable_http_logger): if not _LOGGER.isEnabledFor(logging.DEBUG): return @@ -226,31 +243,35 @@ def on_response(self, request, response): _LOGGER.debug("Response content:") pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) header = response.http_response.headers.get('content-disposition') + resp_content_type = response.http_response.headers.get("content-type", "") if header and pattern.match(header): filename = header.partition('=')[2] _LOGGER.debug("File attachments: %s", filename) - elif response.http_response.headers.get("content-type", "").endswith("octet-stream"): + elif resp_content_type.endswith("octet-stream"): _LOGGER.debug("Body contains binary data.") - elif response.http_response.headers.get("content-type", "").startswith("image"): + elif resp_content_type.startswith("image"): _LOGGER.debug("Body contains image data.") - else: - if response.context.options.get('stream', False): + + if self.logging_body and resp_content_type.startswith("text"): + _LOGGER.debug(response.http_response.text()) + elif self.logging_body: + try: + _LOGGER.debug(response.http_response.body()) + except ValueError: _LOGGER.debug("Body is streamable") - else: - _LOGGER.debug(response.http_response.text()) + except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log response: %s", repr(err)) class StorageRequestHook(SansIOHTTPPolicy): - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs): self._request_callback = kwargs.get('raw_request_hook') super(StorageRequestHook, self).__init__() - def on_request(self, request): - # type: (PipelineRequest, **Any) -> PipelineResponse + def on_request(self, request: "PipelineRequest") -> None: request_callback = request.context.options.pop('raw_request_hook', self._request_callback) if request_callback: request_callback(request) @@ -258,24 +279,33 @@ def on_request(self, request): class StorageResponseHook(HTTPPolicy): - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs): self._response_callback = kwargs.get('raw_response_hook') super(StorageResponseHook, self).__init__() - def send(self, request): - # type: (PipelineRequest) -> PipelineResponse - data_stream_total = request.context.get('data_stream_total') or \ - request.context.options.pop('data_stream_total', None) - download_stream_current = request.context.get('download_stream_current') or \ - request.context.options.pop('download_stream_current', None) - upload_stream_current = request.context.get('upload_stream_current') or \ - request.context.options.pop('upload_stream_current', None) + def send(self, request: "PipelineRequest") -> 
"PipelineResponse": + # Values could be 0 + data_stream_total = request.context.get('data_stream_total') + if data_stream_total is None: + data_stream_total = request.context.options.pop('data_stream_total', None) + download_stream_current = request.context.get('download_stream_current') + if download_stream_current is None: + download_stream_current = request.context.options.pop('download_stream_current', None) + upload_stream_current = request.context.get('upload_stream_current') + if upload_stream_current is None: + upload_stream_current = request.context.options.pop('upload_stream_current', None) + response_callback = request.context.get('response_callback') or \ request.context.options.pop('raw_response_hook', self._response_callback) response = self.next.send(request) - will_retry = is_retry(response, request.context.options.get('mode')) - if not will_retry and download_stream_current is not None: + + will_retry = is_retry(response, request.context.options.get('mode')) or is_checksum_retry(response) + # Auth error could come from Bearer challenge, in which case this request will be made again + is_auth_error = response.http_response.status_code == 401 + should_update_counts = not (will_retry or is_auth_error) + + if should_update_counts and download_stream_current is not None: download_stream_current += int(response.http_response.headers.get('Content-Length', 0)) if data_stream_total is None: content_range = response.http_response.headers.get('Content-Range') @@ -283,12 +313,13 @@ def send(self, request): data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1]) else: data_stream_total = download_stream_current - elif not will_retry and upload_stream_current is not None: + elif should_update_counts and upload_stream_current is not None: upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) for pipeline_obj in [request, response]: - pipeline_obj.context['data_stream_total'] = data_stream_total - pipeline_obj.context['download_stream_current'] = download_stream_current - pipeline_obj.context['upload_stream_current'] = upload_stream_current + if hasattr(pipeline_obj, 'context'): + pipeline_obj.context['data_stream_total'] = data_stream_total + pipeline_obj.context['download_stream_current'] = download_stream_current + pipeline_obj.context['upload_stream_current'] = upload_stream_current if response_callback: response_callback(response) request.context['response_callback'] = response_callback @@ -303,11 +334,14 @@ class StorageContentValidation(SansIOHTTPPolicy): """ header_name = 'Content-MD5' - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs: Any) -> None: # pylint: disable=unused-argument super(StorageContentValidation, self).__init__() @staticmethod def get_content_md5(data): + # Since HTTP does not differentiate between no content and empty content, + # we have to perform a None check. 
+ data = data or b"" md5 = hashlib.md5() # nosec if isinstance(data, bytes): md5.update(data) @@ -321,15 +355,14 @@ def get_content_md5(data): md5.update(chunk) try: data.seek(pos, SEEK_SET) - except (AttributeError, IOError): - raise ValueError("Data should be bytes or a seekable file-like object.") + except (AttributeError, IOError) as exc: + raise ValueError("Data should be bytes or a seekable file-like object.") from exc else: raise ValueError("Data should be bytes or a seekable file-like object.") return md5.digest() - def on_request(self, request): - # type: (PipelineRequest, Any) -> None + def on_request(self, request: "PipelineRequest") -> None: validate_content = request.context.options.pop('validate_content', False) if validate_content and request.http_request.method != 'GET': computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data)) @@ -337,14 +370,14 @@ def on_request(self, request): request.context['validate_content_md5'] = computed_md5 request.context['validate_content'] = validate_content - def on_response(self, request, response): + def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None: if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): computed_md5 = request.context.get('validate_content_md5') or \ encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) if response.http_response.headers['content-md5'] != computed_md5: - raise AzureError( - 'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format( - response.http_response.headers['content-md5'], computed_md5), + raise AzureError(( + f"MD5 mismatch. Expected value is '{response.http_response.headers['content-md5']}', " + f"computed value is '{computed_md5}'."), response=response.http_response ) @@ -354,7 +387,18 @@ class StorageRetryPolicy(HTTPPolicy): The base class for Exponential and Linear retries containing shared code. """ - def __init__(self, **kwargs): + total_retries: int + """The max number of retries.""" + connect_retries: int + """The max number of connect retries.""" + retry_read: int + """The max number of read retries.""" + retry_status: int + """The max number of status retries.""" + retry_to_secondary: bool + """Whether the secondary endpoint should be retried.""" + + def __init__(self, **kwargs: Any) -> None: self.total_retries = kwargs.pop('retry_total', 10) self.connect_retries = kwargs.pop('retry_connect', 3) self.read_retries = kwargs.pop('retry_read', 3) @@ -362,13 +406,12 @@ def __init__(self, **kwargs): self.retry_to_secondary = kwargs.pop('retry_to_secondary', False) super(StorageRetryPolicy, self).__init__() - def _set_next_host_location(self, settings, request): # pylint: disable=no-self-use + def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRequest") -> None: """ A function which sets the next host location on the request, if applicable. - :param ~azure.storage.models.RetryContext context: - The retry context containing the previous host location and the request - to evaluate and possibly modify. + :param Dict[str, Any]] settings: The configurable values pertaining to the next host location. + :param PipelineRequest request: A pipeline request object. 
""" if settings['hosts'] and all(settings['hosts'].values()): url = urlparse(request.url) @@ -380,7 +423,7 @@ def _set_next_host_location(self, settings, request): # pylint: disable=no-self updated = url._replace(netloc=settings['hosts'].get(settings['mode'])) request.url = updated.geturl() - def configure_retries(self, request): # pylint: disable=no-self-use + def configure_retries(self, request: "PipelineRequest") -> Dict[str, Any]: body_position = None if hasattr(request.http_request.body, 'read'): try: @@ -403,10 +446,12 @@ def configure_retries(self, request): # pylint: disable=no-self-use 'history': [] } - def get_backoff_time(self, settings): # pylint: disable=unused-argument,no-self-use + def get_backoff_time(self, settings: Dict[str, Any]) -> float: # pylint: disable=unused-argument """ Formula for computing the current backoff. Should be calculated by child class. + :param Dict[str, Any] settings: The configurable values pertaining to the backoff time. + :returns: The backoff time. :rtype: float """ return 0 @@ -417,14 +462,21 @@ def sleep(self, settings, transport): return transport.sleep(backoff) - def increment(self, settings, request, response=None, error=None): + def increment( + self, settings: Dict[str, Any], + request: "PipelineRequest", + response: Optional["PipelineResponse"] = None, + error: Optional[AzureError] = None + ) -> bool: """Increment the retry counters. - :param response: A pipeline response object. - :param error: An error encountered during the request, or + :param Dict[str, Any] settings: The configurable values pertaining to the increment operation. + :param PipelineRequest request: A pipeline request object. + :param Optional[PipelineResponse] response: A pipeline response object. + :param Optional[AzureError] error: An error encountered during the request, or None if the response was received successfully. - - :return: Whether the retry attempts are exhausted. + :returns: Whether the retry attempts are exhausted. + :rtype: bool """ settings['total'] -= 1 @@ -473,7 +525,7 @@ def send(self, request): while retries_remaining: try: response = self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, retry_settings['mode']) or is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, @@ -488,6 +540,8 @@ def send(self, request): continue break except AzureError as err: + if isinstance(err, AzureSigningError): + raise retries_remaining = self.increment( retry_settings, request=request.http_request, error=err) if retries_remaining: @@ -508,21 +562,33 @@ def send(self, request): class ExponentialRetry(StorageRetryPolicy): """Exponential retry.""" - def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, - retry_to_secondary=False, random_jitter_range=3, **kwargs): - ''' + initial_backoff: int + """The initial backoff interval, in seconds, for the first retry.""" + increment_base: int + """The base, in seconds, to increment the initial_backoff by after the + first retry.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, initial_backoff: int = 15, + increment_base: int = 3, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, + **kwargs: Any + ) -> None: + """ Constructs an Exponential retry object. The initial_backoff is used for the first retry. 
Subsequent retries are retried after initial_backoff + - increment_power^retry_count seconds. For example, by default the first retry - occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the - third after (15+3^2) = 24 seconds. + increment_power^retry_count seconds. :param int initial_backoff: The initial backoff interval, in seconds, for the first retry. :param int increment_base: The base, in seconds, to increment the initial_backoff by after the first retry. - :param int max_attempts: + :param int retry_total: The maximum number of retry attempts. :param bool retry_to_secondary: Whether the request should be retried to secondary, if able. This should @@ -531,21 +597,22 @@ def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, :param int random_jitter_range: A number in seconds which indicates a range to jitter/randomize for the back-off interval. For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. - ''' + """ self.initial_backoff = initial_backoff self.increment_base = increment_base self.random_jitter_range = random_jitter_range super(ExponentialRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. - :return: - An integer indicating how long to wait before retrying the request, + :param Dict[str, Any]] settings: The configurable values pertaining to get backoff time. + :returns: + A float indicating how long to wait before retrying the request, or None to indicate no retry should be performed. - :rtype: int or None + :rtype: float """ random_generator = random.Random() backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count'])) @@ -557,13 +624,24 @@ def get_backoff_time(self, settings): class LinearRetry(StorageRetryPolicy): """Linear retry.""" - def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs): + initial_backoff: int + """The backoff interval, in seconds, between retries.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, backoff: int = 15, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, + **kwargs: Any + ) -> None: """ Constructs a Linear retry object. :param int backoff: The backoff interval, in seconds, between retries. - :param int max_attempts: + :param int retry_total: The maximum number of retry attempts. :param bool retry_to_secondary: Whether the request should be retried to secondary, if able. This should @@ -578,14 +656,15 @@ def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_j super(LinearRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. - :return: - An integer indicating how long to wait before retrying the request, + :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time. + :returns: + A float indicating how long to wait before retrying the request, or None to indicate no retry should be performed. 
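# Aside: the exponential backoff computed above, written out without jitter. It
# reproduces the worked example dropped from the docstring (15, 18, 24, ... seconds).
initial_backoff, increment_base = 15, 3
for count in range(4):
    backoff = initial_backoff + (0 if count == 0 else pow(increment_base, count))
    print(count, backoff)  # 0 -> 15, 1 -> 18, 2 -> 24, 3 -> 42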
- :rtype: int or None + :rtype: float """ random_generator = random.Random() # the backoff interval normally does not change, however there is the possibility @@ -594,3 +673,22 @@ def get_backoff_time(self, settings): if self.backoff > self.random_jitter_range else 0 random_range_end = self.backoff + self.random_jitter_range return random_generator.uniform(random_range_start, random_range_end) + + +class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy): + """ Custom Bearer token credential policy for following Storage Bearer challenges """ + + def __init__(self, credential: "TokenCredential", audience: str, **kwargs: Any) -> None: + super(StorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs) + + def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: + try: + auth_header = response.http_response.headers.get("WWW-Authenticate") + challenge = StorageHttpChallenge(auth_header) + except ValueError: + return False + + scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE + self.authorize_request(request, scope, tenant_id=challenge.tenant_id) + + return True diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies_async.py index e0926b81dbc5..b8574a19f1ed 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/policies_async.py @@ -6,17 +6,23 @@ # pylint: disable=invalid-overridden-method import asyncio -import random import logging -from typing import Any, TYPE_CHECKING +import random +from typing import Any, Dict, TYPE_CHECKING -from azure.core.pipeline.policies import AsyncHTTPPolicy -from azure.core.exceptions import AzureError +from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError +from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy -from .policies import is_retry, StorageRetryPolicy +from .authentication import AzureSigningError, StorageHttpChallenge +from .constants import DEFAULT_OAUTH_SCOPE +from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy if TYPE_CHECKING: - from azure.core.pipeline import PipelineRequest, PipelineResponse + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import + PipelineRequest, + PipelineResponse + ) _LOGGER = logging.getLogger(__name__) @@ -36,28 +42,49 @@ async def retry_hook(settings, **kwargs): **kwargs) +async def is_checksum_retry(response): + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + try: + await response.http_response.read() # Load the body in memory and close the socket + except (StreamClosedError, StreamConsumedError): + pass + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.content)) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + class 
AsyncStorageResponseHook(AsyncHTTPPolicy): - def __init__(self, **kwargs): # pylint: disable=unused-argument + def __init__(self, **kwargs): self._response_callback = kwargs.get('raw_response_hook') super(AsyncStorageResponseHook, self).__init__() - async def send(self, request): - # type: (PipelineRequest) -> PipelineResponse - data_stream_total = request.context.get('data_stream_total') or \ - request.context.options.pop('data_stream_total', None) - download_stream_current = request.context.get('download_stream_current') or \ - request.context.options.pop('download_stream_current', None) - upload_stream_current = request.context.get('upload_stream_current') or \ - request.context.options.pop('upload_stream_current', None) + async def send(self, request: "PipelineRequest") -> "PipelineResponse": + # Values could be 0 + data_stream_total = request.context.get('data_stream_total') + if data_stream_total is None: + data_stream_total = request.context.options.pop('data_stream_total', None) + download_stream_current = request.context.get('download_stream_current') + if download_stream_current is None: + download_stream_current = request.context.options.pop('download_stream_current', None) + upload_stream_current = request.context.get('upload_stream_current') + if upload_stream_current is None: + upload_stream_current = request.context.options.pop('upload_stream_current', None) + response_callback = request.context.get('response_callback') or \ request.context.options.pop('raw_response_hook', self._response_callback) response = await self.next.send(request) - await response.http_response.load_body() + will_retry = is_retry(response, request.context.options.get('mode')) or await is_checksum_retry(response) + + # Auth error could come from Bearer challenge, in which case this request will be made again + is_auth_error = response.http_response.status_code == 401 + should_update_counts = not (will_retry or is_auth_error) - will_retry = is_retry(response, request.context.options.get('mode')) - if not will_retry and download_stream_current is not None: + if should_update_counts and download_stream_current is not None: download_stream_current += int(response.http_response.headers.get('Content-Length', 0)) if data_stream_total is None: content_range = response.http_response.headers.get('Content-Range') @@ -65,15 +92,16 @@ async def send(self, request): data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1]) else: data_stream_total = download_stream_current - elif not will_retry and upload_stream_current is not None: + elif should_update_counts and upload_stream_current is not None: upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) for pipeline_obj in [request, response]: - pipeline_obj.context['data_stream_total'] = data_stream_total - pipeline_obj.context['download_stream_current'] = download_stream_current - pipeline_obj.context['upload_stream_current'] = upload_stream_current + if hasattr(pipeline_obj, 'context'): + pipeline_obj.context['data_stream_total'] = data_stream_total + pipeline_obj.context['download_stream_current'] = download_stream_current + pipeline_obj.context['upload_stream_current'] = upload_stream_current if response_callback: if asyncio.iscoroutine(response_callback): - await response_callback(response) + await response_callback(response) # type: ignore else: response_callback(response) request.context['response_callback'] = response_callback @@ -97,7 +125,7 @@ async def send(self, request): while retries_remaining: try: response 
= await self.next.send(request) - if is_retry(response, retry_settings['mode']): + if is_retry(response, retry_settings['mode']) or await is_checksum_retry(response): retries_remaining = self.increment( retry_settings, request=request.http_request, @@ -112,6 +140,8 @@ async def send(self, request): continue break except AzureError as err: + if isinstance(err, AzureSigningError): + raise retries_remaining = self.increment( retry_settings, request=request.http_request, error=err) if retries_remaining: @@ -132,9 +162,23 @@ async def send(self, request): class ExponentialRetry(AsyncStorageRetryPolicy): """Exponential retry.""" - def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, - retry_to_secondary=False, random_jitter_range=3, **kwargs): - ''' + initial_backoff: int + """The initial backoff interval, in seconds, for the first retry.""" + increment_base: int + """The base, in seconds, to increment the initial_backoff by after the + first retry.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, + initial_backoff: int = 15, + increment_base: int = 3, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, **kwargs + ) -> None: + """ Constructs an Exponential retry object. The initial_backoff is used for the first retry. Subsequent retries are retried after initial_backoff + increment_power^retry_count seconds. For example, by default the first retry @@ -155,17 +199,18 @@ def __init__(self, initial_backoff=15, increment_base=3, retry_total=3, :param int random_jitter_range: A number in seconds which indicates a range to jitter/randomize for the back-off interval. For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. - ''' + """ self.initial_backoff = initial_backoff self.increment_base = increment_base self.random_jitter_range = random_jitter_range super(ExponentialRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. + :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time. :return: An integer indicating how long to wait before retrying the request, or None to indicate no retry should be performed. @@ -181,7 +226,18 @@ def get_backoff_time(self, settings): class LinearRetry(AsyncStorageRetryPolicy): """Linear retry.""" - def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs): + initial_backoff: int + """The backoff interval, in seconds, between retries.""" + random_jitter_range: int + """A number in seconds which indicates a range to jitter/randomize for the back-off interval.""" + + def __init__( + self, backoff: int = 15, + retry_total: int = 3, + retry_to_secondary: bool = False, + random_jitter_range: int = 3, + **kwargs: Any + ) -> None: """ Constructs a Linear retry object. @@ -202,10 +258,11 @@ def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_j super(LinearRetry, self).__init__( retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs) - def get_backoff_time(self, settings): + def get_backoff_time(self, settings: Dict[str, Any]) -> float: """ Calculates how long to sleep before retrying. 
+ :param Dict[str, Any]] settings: The configurable values pertaining to the backoff time. :return: An integer indicating how long to wait before retrying the request, or None to indicate no retry should be performed. @@ -218,3 +275,22 @@ def get_backoff_time(self, settings): if self.backoff > self.random_jitter_range else 0 random_range_end = self.backoff + self.random_jitter_range return random_generator.uniform(random_range_start, random_range_end) + + +class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy): + """ Custom Bearer token credential policy for following Storage Bearer challenges """ + + def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None: + super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs) + + async def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: + try: + auth_header = response.http_response.headers.get("WWW-Authenticate") + challenge = StorageHttpChallenge(auth_header) + except ValueError: + return False + + scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE + await self.authorize_request(request, scope, tenant_id=challenge.tenant_id) + + return True diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/request_handlers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/request_handlers.py index 50f463482d24..54927cc73979 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/request_handlers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/request_handlers.py @@ -4,22 +4,21 @@ # license information. # -------------------------------------------------------------------------- -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, Type, Tuple, - TYPE_CHECKING -) - import logging -from os import fstat +import stat from io import (SEEK_END, SEEK_SET, UnsupportedOperation) +from os import fstat +from typing import Dict, Optional import isodate -from azure.core.exceptions import raise_with_traceback - _LOGGER = logging.getLogger(__name__) +_REQUEST_DELIMITER_PREFIX = "batch_" +_HTTP1_1_IDENTIFIER = "HTTP/1.1" +_HTTP_LINE_ENDING = "\r\n" + def serialize_iso(attr): """Serialize Datetime object into ISO-8601 formatted string. @@ -37,17 +36,12 @@ def serialize_iso(attr): if utc.tm_year > 9999 or utc.tm_year < 1: raise OverflowError("Hit max or min date") - date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( - utc.tm_year, utc.tm_mon, utc.tm_mday, - utc.tm_hour, utc.tm_min, utc.tm_sec) + date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}" return date + 'Z' except (ValueError, OverflowError) as err: - msg = "Unable to serialize datetime object." - raise_with_traceback(ValueError, msg, err) + raise ValueError("Unable to serialize datetime object.") from err except AttributeError as err: - msg = "ISO-8601 object must be valid Datetime object." 
- raise_with_traceback(TypeError, msg, err) - + raise TypeError("ISO-8601 object must be valid datetime object.") from err def get_length(data): length = None @@ -66,7 +60,11 @@ def get_length(data): pass else: try: - return fstat(fileno).st_size + mode = fstat(fileno).st_mode + if stat.S_ISREG(mode) or stat.S_ISLNK(mode): + #st_size only meaningful if regular file or symlink, other types + # e.g. sockets may return misleading sizes like 0 + return fstat(fileno).st_size except OSError: # Not a valid fileno, may be possible requests returned # a socket number? @@ -78,7 +76,7 @@ def get_length(data): data.seek(0, SEEK_END) length = data.tell() - current_position data.seek(current_position, SEEK_SET) - except (AttributeError, UnsupportedOperation): + except (AttributeError, OSError, UnsupportedOperation): pass return length @@ -113,18 +111,18 @@ def validate_and_format_range_headers( # Page ranges must be 512 aligned if align_to_page: if start_range is not None and start_range % 512 != 0: - raise ValueError("Invalid page blob start_range: {0}. " - "The size must be aligned to a 512-byte boundary.".format(start_range)) + raise ValueError(f"Invalid page blob start_range: {start_range}. " + "The size must be aligned to a 512-byte boundary.") if end_range is not None and end_range % 512 != 511: - raise ValueError("Invalid page blob end_range: {0}. " - "The size must be aligned to a 512-byte boundary.".format(end_range)) + raise ValueError(f"Invalid page blob end_range: {end_range}. " + "The size must be aligned to a 512-byte boundary.") # Format based on whether end_range is present range_header = None if end_range is not None: - range_header = 'bytes={0}-{1}'.format(start_range, end_range) + range_header = f'bytes={start_range}-{end_range}' elif start_range is not None: - range_header = "bytes={0}-".format(start_range) + range_header = f"bytes={start_range}-" # Content MD5 can only be provided for a complete range less than 4MB in size range_validation = None @@ -143,5 +141,130 @@ def add_metadata_headers(metadata=None): headers = {} if metadata: for key, value in metadata.items(): - headers['x-ms-meta-{}'.format(key.strip())] = value.strip() if value else value + headers[f'x-ms-meta-{key.strip()}'] = value.strip() if value else value return headers + + +def serialize_batch_body(requests, batch_id): + """ + -- + + -- + (repeated as needed) + ---- + + Serializes the requests in this batch to a single HTTP mixed/multipart body. + + :param List[~azure.core.pipeline.transport.HttpRequest] requests: + a list of sub-request for the batch request + :param str batch_id: + to be embedded in batch sub-request delimiter + :returns: The body bytes for this batch. 
+ :rtype: bytes + """ + + if requests is None or len(requests) == 0: + raise ValueError('Please provide sub-request(s) for this batch request') + + delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8') + newline_bytes = _HTTP_LINE_ENDING.encode('utf-8') + batch_body = [] + + content_index = 0 + for request in requests: + request.headers.update({ + "Content-ID": str(content_index), + "Content-Length": str(0) + }) + batch_body.append(delimiter_bytes) + batch_body.append(_make_body_from_sub_request(request)) + batch_body.append(newline_bytes) + content_index += 1 + + batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8')) + # final line of body MUST have \r\n at the end, or it will not be properly read by the service + batch_body.append(newline_bytes) + + return b"".join(batch_body) + + +def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False): + """ + Gets the delimiter used for this batch request's mixed/multipart HTTP format. + + :param str batch_id: + Randomly generated id + :param bool is_prepend_dashes: + Whether to include the starting dashes. Used in the body, but non on defining the delimiter. + :param bool is_append_dashes: + Whether to include the ending dashes. Used in the body on the closing delimiter only. + :returns: The delimiter, WITHOUT a trailing newline. + :rtype: str + """ + + prepend_dashes = '--' if is_prepend_dashes else '' + append_dashes = '--' if is_append_dashes else '' + + return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes + + +def _make_body_from_sub_request(sub_request): + """ + Content-Type: application/http + Content-ID: + Content-Transfer-Encoding: (if present) + + HTTP/ +
<header key>: <header value>
(repeated as necessary) + Content-Length: + (newline if content length > 0) + (if content length > 0) + + Serializes an http request. + + :param ~azure.core.pipeline.transport.HttpRequest sub_request: + Request to serialize. + :returns: The serialized sub-request in bytes + :rtype: bytes + """ + + # put the sub-request's headers into a list for efficient str concatenation + sub_request_body = [] + + # get headers for ease of manipulation; remove headers as they are used + headers = sub_request.headers + + # append opening headers + sub_request_body.append("Content-Type: application/http") + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-ID: ") + sub_request_body.append(headers.pop("Content-ID", "")) + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-Transfer-Encoding: binary") + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + # append HTTP verb and path and query and HTTP version + sub_request_body.append(sub_request.method) + sub_request_body.append(' ') + sub_request_body.append(sub_request.url) + sub_request_body.append(' ') + sub_request_body.append(_HTTP1_1_IDENTIFIER) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append remaining headers (this will set the Content-Length, as it was set on `sub-request`) + for header_name, header_value in headers.items(): + if header_value is not None: + sub_request_body.append(header_name) + sub_request_body.append(": ") + sub_request_body.append(header_value) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + return ''.join(sub_request_body).encode() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/response_handlers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/response_handlers.py index 4b591dd7877c..af9a2fcdcdc2 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/response_handlers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/response_handlers.py @@ -3,29 +3,23 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
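# Aside: a standalone sketch of _get_batch_request_delimiter above. Dashes are
# prepended on every delimiter inside the body and appended only on the closing one.
def get_delimiter(batch_id, prepend_dashes=False, append_dashes=False):
    prefix = "--" if prepend_dashes else ""
    suffix = "--" if append_dashes else ""
    return prefix + "batch_" + batch_id + suffix

assert get_delimiter("1d3f", True, False) == "--batch_1d3f"   # opens each sub-request
assert get_delimiter("1d3f", True, True) == "--batch_1d3f--"  # closes the batch body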
# -------------------------------------------------------------------------- - -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, Type, Tuple, - TYPE_CHECKING -) import logging +from typing import NoReturn +from xml.etree.ElementTree import Element -from azure.core.pipeline.policies import ContentDecodePolicy from azure.core.exceptions import ( + ClientAuthenticationError, + DecodeError, HttpResponseError, - ResourceNotFoundError, - ResourceModifiedError, ResourceExistsError, - ClientAuthenticationError, - DecodeError) + ResourceModifiedError, + ResourceNotFoundError, +) +from azure.core.pipeline.policies import ContentDecodePolicy +from .authentication import AzureSigningError +from .models import get_enum_value, StorageErrorCode, UserDelegationKey from .parser import _to_utc_datetime -from .models import StorageErrorCode, UserDelegationKey, get_enum_value - - -if TYPE_CHECKING: - from datetime import datetime - from azure.core.exceptions import AzureError _LOGGER = logging.getLogger(__name__) @@ -44,10 +38,8 @@ def __init__(self, message, response, parts): super(PartialBatchErrorException, self).__init__(message=message, response=response) +# Parses the blob length from the content range header: bytes 1-3/65537 def parse_length_from_content_range(content_range): - ''' - Parses the blob length from the content range header: bytes 1-3/65537 - ''' if content_range is None: return None @@ -67,7 +59,10 @@ def normalize_headers(headers): def deserialize_metadata(response, obj, headers): # pylint: disable=unused-argument - raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.startswith("x-ms-meta-")} + try: + raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.lower().startswith('x-ms-meta-')} + except AttributeError: + raw_metadata = {k: v for k, v in response.headers.items() if k.lower().startswith('x-ms-meta-')} return {k[10:]: v for k, v in raw_metadata.items()} @@ -83,29 +78,59 @@ def return_context_and_deserialized(response, deserialized, response_headers): return response.http_response.location_mode, deserialized -def process_storage_error(storage_error): - # If storage_error is one of the two then it has already been processed and serialized to the specific exception. - if isinstance(storage_error, (PartialBatchErrorException, ClientAuthenticationError)): - raise storage_error +def return_raw_deserialized(response, *_): + return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME] + + +def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches raise_error = HttpResponseError + serialized = False + if isinstance(storage_error, AzureSigningError): + storage_error.message = storage_error.message + \ + '. This is likely due to an invalid shared key. Please check your shared key and try again.' + if not storage_error.response or storage_error.response.status_code in [200, 204]: + raise storage_error + # If it is one of those three then it has been serialized prior by the generated layer. 
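+    # Pre-serialized errors keep their original exception type below; only the message is enriched.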
+ if isinstance(storage_error, (PartialBatchErrorException, + ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)): + serialized = True error_code = storage_error.response.headers.get('x-ms-error-code') error_message = storage_error.message additional_data = {} + error_dict = {} try: error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response) - if error_body: - for info in error_body.iter(): - if info.tag.lower() == 'code': - error_code = info.text - elif info.tag.lower() == 'message': - error_message = info.text - else: - additional_data[info.tag] = info.text + try: + if error_body is None or len(error_body) == 0: + error_body = storage_error.response.reason + except AttributeError: + error_body = '' + # If it is an XML response + if isinstance(error_body, Element): + error_dict = { + child.tag.lower(): child.text + for child in error_body + } + # If it is a JSON response + elif isinstance(error_body, dict): + error_dict = error_body.get('error', {}) + elif not error_code: + _LOGGER.warning( + 'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body)) + error_dict = {'message': str(error_body)} + + # If we extracted from a Json or XML response + # There is a chance error_dict is just a string + if error_dict and isinstance(error_dict, dict): + error_code = error_dict.get('code') + error_message = error_dict.get('message') + additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}} except DecodeError: pass try: - if error_code: + # This check would be unnecessary if we have already serialized the error + if error_code and not serialized: error_code = StorageErrorCode(error_code) if error_code in [StorageErrorCode.condition_not_met, StorageErrorCode.blob_overwritten]: @@ -137,17 +162,30 @@ def process_storage_error(storage_error): # Got an unknown error code pass + # Error message should include all the error properties try: - error_message += "\nErrorCode:{}".format(error_code.value) + error_message += f"\nErrorCode:{error_code.value}" except AttributeError: - error_message += "\nErrorCode:{}".format(error_code) + error_message += f"\nErrorCode:{error_code}" for name, info in additional_data.items(): - error_message += "\n{}:{}".format(name, info) - - error = raise_error(message=error_message, response=storage_error.response) + error_message += f"\n{name}:{info}" + + # No need to create an instance if it has already been serialized by the generated layer + if serialized: + storage_error.message = error_message + error = storage_error + else: + error = raise_error(message=error_message, response=storage_error.response) + # Ensure these properties are stored in the error instance as well (not just the error message) error.error_code = error_code error.additional_info = additional_data - error.raise_with_traceback() + # error.args is what's surfaced on the traceback - show error message in all cases + error.args = (error.message,) + try: + # `from None` prevents us from double printing the exception (suppresses generated layer error context) + exec("raise error from None") # pylint: disable=exec-used # nosec + except SyntaxError as exc: + raise error from exc def parse_to_internal_user_delegation_key(service_user_delegation_key): diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/shared_access_signature.py 
b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/shared_access_signature.py index 07aad5ffa1c8..df29222b873e 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/shared_access_signature.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/shared_access_signature.py @@ -3,14 +3,16 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only from datetime import date -from .parser import _str, _to_utc_datetime +from .parser import _to_utc_datetime from .constants import X_MS_VERSION from . import sign_string, url_quote - +# cspell:ignoreRegExp rsc. +# cspell:ignoreRegExp s..?id class QueryStringConstants(object): SIGNED_SIGNATURE = 'sig' SIGNED_PERMISSION = 'sp' @@ -38,6 +40,7 @@ class QueryStringConstants(object): SIGNED_KEY_EXPIRY = 'ske' SIGNED_KEY_SERVICE = 'sks' SIGNED_KEY_VERSION = 'skv' + SIGNED_ENCRYPTION_SCOPE = 'ses' # for ADLS SIGNED_AUTHORIZED_OID = 'saoid' @@ -74,6 +77,7 @@ def to_list(): QueryStringConstants.SIGNED_KEY_EXPIRY, QueryStringConstants.SIGNED_KEY_SERVICE, QueryStringConstants.SIGNED_KEY_VERSION, + QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, # for ADLS QueryStringConstants.SIGNED_AUTHORIZED_OID, QueryStringConstants.SIGNED_UNAUTHORIZED_OID, @@ -103,13 +107,23 @@ def __init__(self, account_name, account_key, x_ms_version=X_MS_VERSION): self.account_key = account_key self.x_ms_version = x_ms_version - def generate_account(self, services, resource_types, permission, expiry, start=None, - ip=None, protocol=None): + def generate_account( + self, services, + resource_types, + permission, + expiry, + start=None, + ip=None, + protocol=None, + sts_hook=None, + **kwargs + ) -> str: ''' Generates a shared access signature for the account. Use the returned signature with the sas_token parameter of the service or to create a new account object. + :param Any services: The specified services associated with the shared access signature. :param ResourceTypes resource_types: Specifies the resource types that are accessible with the account SAS. You can combine values to provide access to more than one @@ -132,9 +146,8 @@ def generate_account(self, services, resource_types, permission, expiry, start=N :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: datetime or str :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. @@ -145,22 +158,39 @@ def generate_account(self, services, resource_types, permission, expiry, start=N :param str protocol: Specifies the protocol permitted for a request made. The default value is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. + :keyword str encryption_scope: + Optional. If specified, this is the encryption scope to use when sending requests + authorized with this SAS URI. 
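# Aside: hypothetical usage of the account SAS generator being extended here. The
# class name and import path are assumed from the vendored module; the key is a dummy.
from datetime import datetime, timedelta, timezone

from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._shared.shared_access_signature import (
    SharedAccessSignature,
)

sas = SharedAccessSignature("myaccount", "a2V5")  # "a2V5" is base64 for b"key"
token = sas.generate_account(
    services="b",                # blob service
    resource_types="co",         # container + object
    permission="rl",             # read + list
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
    sts_hook=print,              # debugging hook: receives the string-to-sign
)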
+ :param sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :type sts_hook: Optional[Callable[[str], None]] + :returns: The generated SAS token for the account. + :rtype: str ''' sas = _SharedAccessHelper() sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) sas.add_account(services, resource_types) + sas.add_encryption_scope(**kwargs) sas.add_account_signature(self.account_name, self.account_key) + if sts_hook is not None: + sts_hook(sas.string_to_sign) + return sas.get_token() class _SharedAccessHelper(object): def __init__(self): self.query_dict = {} + self.string_to_sign = "" def _add_query(self, name, val): if val: - self.query_dict[name] = _str(val) if val is not None else None + self.query_dict[name] = str(val) if val is not None else None + + def add_encryption_scope(self, **kwargs): + self._add_query(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, kwargs.pop('encryption_scope', None)) def add_base(self, permission, expiry, start, ip, protocol, x_ms_version): if isinstance(start, date): @@ -211,10 +241,12 @@ def get_value_to_append(query): get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + get_value_to_append(QueryStringConstants.SIGNED_IP) + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + - get_value_to_append(QueryStringConstants.SIGNED_VERSION)) + get_value_to_append(QueryStringConstants.SIGNED_VERSION) + + get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE)) self._add_query(QueryStringConstants.SIGNED_SIGNATURE, sign_string(account_key, string_to_sign)) + self.string_to_sign = string_to_sign - def get_token(self): - return '&'.join(['{0}={1}'.format(n, url_quote(v)) for n, v in self.query_dict.items() if v is not None]) + def get_token(self) -> str: + return '&'.join([f'{n}={url_quote(v)}' for n, v in self.query_dict.items() if v is not None]) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads.py index acdc16f01ff7..b31cfb3291d9 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads.py @@ -3,22 +3,18 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use from concurrent import futures -from io import (BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation) -from threading import Lock +from io import BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation from itertools import islice from math import ceil - -import six +from threading import Lock from azure.core.tracing.common import with_current_context -from . 
import encode_base64, url_quote +from .import encode_base64, url_quote from .request_handlers import get_length from .response_handlers import return_response_headers -from .encryption import get_blob_encryptor_and_padder _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 @@ -52,17 +48,9 @@ def upload_data_chunks( max_concurrency=None, stream=None, validate_content=None, - encryption_options=None, + progress_hook=None, **kwargs): - if encryption_options: - encryptor, padder = get_blob_encryptor_and_padder( - encryption_options.get('cek'), - encryption_options.get('vector'), - uploader_class is not PageBlobChunkUploader) - kwargs['encryptor'] = encryptor - kwargs['padder'] = padder - parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: # Access conditions do not work with parallelism @@ -75,15 +63,16 @@ def upload_data_chunks( stream=stream, parallel=parallel, validate_content=validate_content, + progress_hook=progress_hook, **kwargs) if parallel: - executor = futures.ThreadPoolExecutor(max_concurrency) - upload_tasks = uploader.get_chunk_streams() - running_futures = [ - executor.submit(with_current_context(uploader.process_chunk), u) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures) + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_chunk_streams() + running_futures = [ + executor.submit(with_current_context(uploader.process_chunk), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures) else: range_ids = [uploader.process_chunk(result) for result in uploader.get_chunk_streams()] if any(range_ids): @@ -98,6 +87,7 @@ def upload_substream_blocks( chunk_size=None, max_concurrency=None, stream=None, + progress_hook=None, **kwargs): parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: @@ -109,24 +99,36 @@ def upload_substream_blocks( chunk_size=chunk_size, stream=stream, parallel=parallel, + progress_hook=progress_hook, **kwargs) if parallel: - executor = futures.ThreadPoolExecutor(max_concurrency) - upload_tasks = uploader.get_substream_blocks() - running_futures = [ - executor.submit(with_current_context(uploader.process_substream_block), u) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_substream_blocks() + running_futures = [ + executor.submit(with_current_context(uploader.process_substream_block), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) else: range_ids = [uploader.process_substream_block(b) for b in uploader.get_substream_blocks()] - return sorted(range_ids) + if any(range_ids): + return sorted(range_ids) + return [] class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes - def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs): + def __init__( + self, service, + total_size, + chunk_size, + stream, + parallel, + encryptor=None, + padder=None, + progress_hook=None, + **kwargs): self.service = service self.total_size = total_size self.chunk_size = chunk_size @@ -134,12 
+136,12 @@ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor= self.parallel = parallel # Stream management - self.stream_start = stream.tell() if parallel else None self.stream_lock = Lock() if parallel else None # Progress feedback self.progress_total = 0 self.progress_lock = Lock() if parallel else None + self.progress_hook = progress_hook # Encryption self.encryptor = encryptor @@ -160,7 +162,7 @@ def get_chunk_streams(self): if self.total_size: read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) temp = self.stream.read(read_size) - if not isinstance(temp, six.binary_type): + if not isinstance(temp, bytes): raise TypeError("Blob data should be of type bytes.") data += temp or b"" @@ -197,6 +199,9 @@ def _update_progress(self, length): else: self.progress_total += length + if self.progress_hook: + self.progress_hook(self.progress_total, self.total_size) + def _upload_chunk(self, chunk_offset, chunk_data): raise NotImplementedError("Must be implemented by child class.") @@ -221,16 +226,16 @@ def get_substream_blocks(self): for i in range(blocks): index = i * self.chunk_size length = last_block_size if i == blocks - 1 else self.chunk_size - yield ('BlockId{}'.format("%05d" % i), SubStream(self.stream, index, length, lock)) + yield index, SubStream(self.stream, index, length, lock) def process_substream_block(self, block_data): return self._upload_substream_block_with_progress(block_data[0], block_data[1]) - def _upload_substream_block(self, block_id, block_stream): + def _upload_substream_block(self, index, block_stream): raise NotImplementedError("Must be implemented by child class.") - def _upload_substream_block_with_progress(self, block_id, block_stream): - range_id = self._upload_substream_block(block_id, block_stream) + def _upload_substream_block_with_progress(self, index, block_stream): + range_id = self._upload_substream_block(index, block_stream) self._update_progress(len(block_stream)) return range_id @@ -248,7 +253,7 @@ def __init__(self, *args, **kwargs): def _upload_chunk(self, chunk_offset, chunk_data): # TODO: This is incorrect, but works with recording. 
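For reference, the block id computed in the lines that follow can be reproduced standalone. This sketch assumes the vendored encode_base64 and url_quote helpers wrap base64.b64encode and urllib.parse.quote, which is how the _shared package defines them:

    import base64
    from urllib.parse import quote

    def block_id_for_offset(chunk_offset: int) -> str:
        # 32-digit zero-padded offset -> base64 -> percent-encode -> base64.
        # commit_block_list only succeeds if the ids match the staged blocks,
        # so every path that stages blocks must derive ids identically.
        index = f'{chunk_offset:032d}'
        inner = base64.b64encode(index.encode('utf-8')).decode('utf-8')
        return base64.b64encode(quote(inner).encode('utf-8')).decode('utf-8')

    print(block_id_for_offset(0))                # id of the chunk at offset 0
    print(block_id_for_offset(4 * 1024 * 1024))  # id of the next 4 MiB chunk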
- index = '{0:032d}'.format(chunk_offset) + index = f'{chunk_offset:032d}' block_id = encode_base64(url_quote(encode_base64(index))) self.service.stage_block( block_id, @@ -260,8 +265,9 @@ def _upload_chunk(self, chunk_offset, chunk_data): ) return index, block_id - def _upload_substream_block(self, block_id, block_stream): + def _upload_substream_block(self, index, block_stream): try: + block_id = f'BlockId{(index//self.chunk_size):05}' self.service.stage_block( block_id, len(block_stream), @@ -275,7 +281,7 @@ def _upload_substream_block(self, block_id, block_stream): return block_id -class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class PageBlobChunkUploader(_ChunkUploader): def _is_chunk_empty(self, chunk_data): # read until non-zero byte is encountered @@ -286,7 +292,7 @@ def _upload_chunk(self, chunk_offset, chunk_data): # avoid uploading the empty pages if not self._is_chunk_empty(chunk_data): chunk_end = chunk_offset + len(chunk_data) - 1 - content_range = "bytes={0}-{1}".format(chunk_offset, chunk_end) + content_range = f"bytes={chunk_offset}-{chunk_end}" computed_md5 = None self.response_headers = self.service.upload_pages( body=chunk_data, @@ -302,8 +308,11 @@ def _upload_chunk(self, chunk_offset, chunk_data): if not self.parallel and self.request_options.get('modified_access_conditions'): self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + def _upload_substream_block(self, index, block_stream): + pass + -class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class AppendBlobChunkUploader(_ChunkUploader): def __init__(self, *args, **kwargs): super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) @@ -332,8 +341,43 @@ def _upload_chunk(self, chunk_offset, chunk_data): **self.request_options ) + def _upload_substream_block(self, index, block_stream): + pass + + +class DataLakeFileChunkUploader(_ChunkUploader): + + def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + self.response_headers = self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + def _upload_substream_block(self, index, block_stream): + try: + self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + -class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class FileChunkUploader(_ChunkUploader): def _upload_chunk(self, chunk_offset, chunk_data): length = len(chunk_data) @@ -346,7 +390,11 @@ def _upload_chunk(self, chunk_offset, chunk_data): upload_stream_current=self.progress_total, **self.request_options ) - return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response + return f'bytes={chunk_offset}-{chunk_end}', response + + # TODO: Implement this method. 
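Stepping back from the individual uploaders: the progress_hook threaded through upload_data_chunks and upload_substream_blocks above is invoked by _update_progress after every completed chunk as progress_hook(progress_total, total_size), so any callable of that shape works. A minimal sketch:

    from typing import Optional

    def print_progress(current: int, total: Optional[int]) -> None:
        # current is the cumulative byte count; total is the expected size,
        # or None when the stream length is not known up front.
        if total:
            print(f"uploaded {current}/{total} bytes ({100 * current // total}%)")
        else:
            print(f"uploaded {current} bytes")

Note that the async counterparts in uploads_async.py await the hook, so there the callable must be an async function.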
+ def _upload_substream_block(self, index, block_stream): + pass class SubStream(IOBase): @@ -358,8 +406,8 @@ def __init__(self, wrapped_stream, stream_begin_index, length, lockObj): try: # only the main thread runs this, so there's no need grabbing the lock wrapped_stream.seek(0, SEEK_CUR) - except: - raise ValueError("Wrapped stream must support seek().") + except Exception as exc: + raise ValueError("Wrapped stream must support seek().") from exc self._lock = lockObj self._wrapped_stream = wrapped_stream @@ -432,6 +480,13 @@ def read(self, size=None): raise IOError("Stream failed to seek to the desired location.") buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) else: + absolute_position = self._stream_begin_index + self._position + # It's possible that there's connection problem during data transfer, + # so when we retry we don't want to read from current position of wrapped stream, + # instead we should seek to where we want to read from. + if self._wrapped_stream.tell() != absolute_position: + self._wrapped_stream.seek(absolute_position, SEEK_SET) + buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) if buffer_from_stream: @@ -523,13 +578,11 @@ def seekable(self): def __next__(self): return next(self.iterator) - next = __next__ # Python 2 compatibility. - def tell(self, *args, **kwargs): raise UnsupportedOperation("Data generator does not support tell.") def seek(self, *args, **kwargs): - raise UnsupportedOperation("Data generator is unseekable.") + raise UnsupportedOperation("Data generator is not seekable.") def read(self, size): data = self.leftover @@ -537,14 +590,15 @@ def read(self, size): try: while count < size: chunk = self.__next__() - if isinstance(chunk, six.text_type): + if isinstance(chunk, str): chunk = chunk.encode(self.encoding) data += chunk count += len(chunk) + # This means count < size and what's leftover will be returned in this call. except StopIteration: - pass + self.leftover = b"" - if count > size: + if count >= size: self.leftover = data[size:] return data[:size] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads_async.py index e598165330ae..3e102ec5daef 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared/uploads_async.py @@ -3,26 +3,40 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=no-self-use import asyncio +import inspect +import threading from asyncio import Lock +from io import UnsupportedOperation from itertools import islice -import threading - from math import ceil +from typing import AsyncGenerator, Union -import six - -from . 
import encode_base64, url_quote +from .import encode_base64, url_quote from .request_handlers import get_length from .response_handlers import return_response_headers -from .encryption import get_blob_encryptor_and_padder from .uploads import SubStream, IterStreamer # pylint: disable=unused-import -_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 -_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.' +async def _async_parallel_uploads(uploader, pending, running): + range_ids = [] + while True: + # Wait for some upload to finish before adding a new one + done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED) + range_ids.extend([chunk.result() for chunk in done]) + try: + for _ in range(0, len(done)): + next_chunk = await pending.__anext__() + running.add(asyncio.ensure_future(uploader(next_chunk))) + except StopAsyncIteration: + break + + # Wait for the remaining uploads to finish + if running: + done, _running = await asyncio.wait(running) + range_ids.extend([chunk.result() for chunk in done]) + return range_ids async def _parallel_uploads(uploader, pending, running): @@ -52,17 +66,9 @@ async def upload_data_chunks( chunk_size=None, max_concurrency=None, stream=None, - encryption_options=None, + progress_hook=None, **kwargs): - if encryption_options: - encryptor, padder = get_blob_encryptor_and_padder( - encryption_options.get('cek'), - encryption_options.get('vector'), - uploader_class is not PageBlobChunkUploader) - kwargs['encryptor'] = encryptor - kwargs['padder'] = padder - parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: # Access conditions do not work with parallelism @@ -74,18 +80,23 @@ async def upload_data_chunks( chunk_size=chunk_size, stream=stream, parallel=parallel, + progress_hook=progress_hook, **kwargs) if parallel: upload_tasks = uploader.get_chunk_streams() - running_futures = [ - asyncio.ensure_future(uploader.process_chunk(u)) - for u in islice(upload_tasks, 0, max_concurrency) - ] - range_ids = await _parallel_uploads(uploader.process_chunk, upload_tasks, running_futures) + running_futures = [] + for _ in range(max_concurrency): + try: + chunk = await upload_tasks.__anext__() + running_futures.append(asyncio.ensure_future(uploader.process_chunk(chunk))) + except StopAsyncIteration: + break + + range_ids = await _async_parallel_uploads(uploader.process_chunk, upload_tasks, running_futures) else: range_ids = [] - for chunk in uploader.get_chunk_streams(): + async for chunk in uploader.get_chunk_streams(): range_ids.append(await uploader.process_chunk(chunk)) if any(range_ids): @@ -100,6 +111,7 @@ async def upload_substream_blocks( chunk_size=None, max_concurrency=None, stream=None, + progress_hook=None, **kwargs): parallel = max_concurrency > 1 if parallel and 'modified_access_conditions' in kwargs: @@ -111,6 +123,7 @@ async def upload_substream_blocks( chunk_size=chunk_size, stream=stream, parallel=parallel, + progress_hook=progress_hook, **kwargs) if parallel: @@ -124,12 +137,23 @@ async def upload_substream_blocks( range_ids = [] for block in uploader.get_substream_blocks(): range_ids.append(await uploader.process_substream_block(block)) - return sorted(range_ids) + if any(range_ids): + return sorted(range_ids) + return [] class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes - def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor=None, padder=None, **kwargs): + def __init__( + self, service, +
total_size, + chunk_size, + stream, + parallel, + encryptor=None, + padder=None, + progress_hook=None, + **kwargs): self.service = service self.total_size = total_size self.chunk_size = chunk_size @@ -137,12 +161,12 @@ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor= self.parallel = parallel # Stream management - self.stream_start = stream.tell() if parallel else None self.stream_lock = threading.Lock() if parallel else None # Progress feedback self.progress_total = 0 self.progress_lock = Lock() if parallel else None + self.progress_hook = progress_hook # Encryption self.encryptor = encryptor @@ -152,7 +176,7 @@ def __init__(self, service, total_size, chunk_size, stream, parallel, encryptor= self.last_modified = None self.request_options = kwargs - def get_chunk_streams(self): + async def get_chunk_streams(self): index = 0 while True: data = b'' @@ -163,7 +187,9 @@ def get_chunk_streams(self): if self.total_size: read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) temp = self.stream.read(read_size) - if not isinstance(temp, six.binary_type): + if inspect.isawaitable(temp): + temp = await temp + if not isinstance(temp, bytes): raise TypeError('Blob data should be of type bytes.') data += temp or b"" @@ -200,6 +226,9 @@ async def _update_progress(self, length): else: self.progress_total += length + if self.progress_hook: + await self.progress_hook(self.progress_total, self.total_size) + async def _upload_chunk(self, chunk_offset, chunk_data): raise NotImplementedError("Must be implemented by child class.") @@ -224,16 +253,16 @@ def get_substream_blocks(self): for i in range(blocks): index = i * self.chunk_size length = last_block_size if i == blocks - 1 else self.chunk_size - yield ('BlockId{}'.format("%05d" % i), SubStream(self.stream, index, length, lock)) + yield index, SubStream(self.stream, index, length, lock) async def process_substream_block(self, block_data): return await self._upload_substream_block_with_progress(block_data[0], block_data[1]) - async def _upload_substream_block(self, block_id, block_stream): + async def _upload_substream_block(self, index, block_stream): raise NotImplementedError("Must be implemented by child class.") - async def _upload_substream_block_with_progress(self, block_id, block_stream): - range_id = await self._upload_substream_block(block_id, block_stream) + async def _upload_substream_block_with_progress(self, index, block_stream): + range_id = await self._upload_substream_block(index, block_stream) await self._update_progress(len(block_stream)) return range_id @@ -251,19 +280,20 @@ def __init__(self, *args, **kwargs): async def _upload_chunk(self, chunk_offset, chunk_data): # TODO: This is incorrect, but works with recording. 
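A note on get_chunk_streams above: by probing with inspect.isawaitable, the async uploader now accepts both synchronous and asynchronous streams. The read path reduces to roughly:

    import inspect

    async def read_chunk(stream, size: int) -> bytes:
        # stream.read may be sync (returning bytes) or async (returning an
        # awaitable); await the result only when necessary.
        data = stream.read(size)
        if inspect.isawaitable(data):
            data = await data
        if not isinstance(data, bytes):
            raise TypeError('Blob data should be of type bytes.')
        return data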
- index = '{0:032d}'.format(chunk_offset) + index = f'{chunk_offset:032d}' block_id = encode_base64(url_quote(encode_base64(index))) await self.service.stage_block( block_id, len(chunk_data), - chunk_data, + body=chunk_data, data_stream_total=self.total_size, upload_stream_current=self.progress_total, **self.request_options) return index, block_id - async def _upload_substream_block(self, block_id, block_stream): + async def _upload_substream_block(self, index, block_stream): try: + block_id = f'BlockId{(index//self.chunk_size):05}' await self.service.stage_block( block_id, len(block_stream), @@ -276,7 +306,7 @@ async def _upload_substream_block(self, block_id, block_stream): return block_id -class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class PageBlobChunkUploader(_ChunkUploader): def _is_chunk_empty(self, chunk_data): # read until non-zero byte is encountered @@ -290,7 +320,7 @@ async def _upload_chunk(self, chunk_offset, chunk_data): # avoid uploading the empty pages if not self._is_chunk_empty(chunk_data): chunk_end = chunk_offset + len(chunk_data) - 1 - content_range = 'bytes={0}-{1}'.format(chunk_offset, chunk_end) + content_range = f'bytes={chunk_offset}-{chunk_end}' computed_md5 = None self.response_headers = await self.service.upload_pages( body=chunk_data, @@ -305,8 +335,11 @@ async def _upload_chunk(self, chunk_offset, chunk_data): if not self.parallel and self.request_options.get('modified_access_conditions'): self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + async def _upload_substream_block(self, index, block_stream): + pass + -class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method +class AppendBlobChunkUploader(_ChunkUploader): def __init__(self, *args, **kwargs): super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) @@ -333,18 +366,95 @@ async def _upload_chunk(self, chunk_offset, chunk_data): upload_stream_current=self.progress_total, **self.request_options) + async def _upload_substream_block(self, index, block_stream): + pass -class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + +class DataLakeFileChunkUploader(_ChunkUploader): async def _upload_chunk(self, chunk_offset, chunk_data): - chunk_end = chunk_offset + len(chunk_data) - 1 + self.response_headers = await self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + async def _upload_substream_block(self, index, block_stream): + try: + await self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + + +class FileChunkUploader(_ChunkUploader): + + async def _upload_chunk(self, chunk_offset, chunk_data): + length = len(chunk_data) + chunk_end = chunk_offset + length - 1 response = await self.service.upload_range( chunk_data, chunk_offset, - chunk_end, + length, data_stream_total=self.total_size, upload_stream_current=self.progress_total, **self.request_options ) - range_id = 
'bytes={0}-{1}'.format(chunk_offset, chunk_end) + range_id = f'bytes={chunk_offset}-{chunk_end}' return range_id, response + + # TODO: Implement this method. + async def _upload_substream_block(self, index, block_stream): + pass + + +class AsyncIterStreamer(): + """ + File-like streaming object for AsyncGenerators. + """ + def __init__(self, generator: AsyncGenerator[Union[bytes, str], None], encoding: str = "UTF-8"): + self.iterator = generator.__aiter__() + self.leftover = b"" + self.encoding = encoding + + def seekable(self): + return False + + def tell(self, *args, **kwargs): + raise UnsupportedOperation("Data generator does not support tell.") + + def seek(self, *args, **kwargs): + raise UnsupportedOperation("Data generator is not seekable.") + + async def read(self, size: int) -> bytes: + data = self.leftover + count = len(self.leftover) + try: + while count < size: + chunk = await self.iterator.__anext__() + if isinstance(chunk, str): + chunk = chunk.encode(self.encoding) + data += chunk + count += len(chunk) + # This means count < size and what's leftover will be returned in this call. + except StopAsyncIteration: + self.leftover = b"" + + if count >= size: + self.leftover = data[size:] + + return data[:size] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared_access_signature.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared_access_signature.py index 890ef1b2eeac..a3005be2ac64 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared_access_signature.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_shared_access_signature.py @@ -3,26 +3,22 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, TYPE_CHECKING +from typing import ( + Any, Callable, Optional, Union, + TYPE_CHECKING ) +from urllib.parse import parse_qs from ._shared import sign_string, url_quote from ._shared.constants import X_MS_VERSION -from ._shared.models import Services -from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper, \ - QueryStringConstants +from ._shared.models import Services, UserDelegationKey +from ._shared.shared_access_signature import QueryStringConstants, SharedAccessSignature, _SharedAccessHelper if TYPE_CHECKING: from datetime import datetime - from ..blob import ( - ResourceTypes, - AccountSasPermissions, - UserDelegationKey, - ContainerSasPermissions, - BlobSasPermissions - ) + from ..blob import AccountSasPermissions, BlobSasPermissions, ContainerSasPermissions, ResourceTypes class BlobQueryStringConstants(object): @@ -37,13 +33,17 @@ class BlobSharedAccessSignature(SharedAccessSignature): generate_*_shared_access_signature method directly. ''' - def __init__(self, account_name, account_key=None, user_delegation_key=None): + def __init__( + self, account_name: str, + account_key: Optional[str] = None, + user_delegation_key: Optional[UserDelegationKey] = None + ) -> None: ''' :param str account_name: The storage account name used to generate the shared access signatures. 
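Regarding the AsyncIterStreamer added at the end of uploads_async.py above: it adapts an async generator to the minimal file-like read() contract the async uploaders expect, buffering any bytes read past the requested size in leftover. A usage sketch (the class lives in the private vendored module, so the import path below is internal and subject to change):

    import asyncio
    from azure.eventhub.extensions.checkpointstoreblob._vendor.storage.blob._shared.uploads_async import (
        AsyncIterStreamer,
    )

    async def chunks():
        yield b"hello, "
        yield "world"  # str chunks are encoded with the configured encoding

    async def main():
        stream = AsyncIterStreamer(chunks())
        print(await stream.read(5))    # b'hello'; b', ' is kept as leftover
        print(await stream.read(100))  # b', world'; generator is now exhausted

    asyncio.run(main())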
- :param str account_key: + :param Optional[str] account_key: The access key to generate the shared access signatures. - :param ~azure.storage.blob.models.UserDelegationKey user_delegation_key: + :param Optional[~azure.storage.blob.models.UserDelegationKey] user_delegation_key: Instead of an account key, the user could pass in a user delegation key. A user delegation key can be obtained from the service by authenticating with an AAD identity; this can be accomplished by calling get_user_delegation_key on any Blob service object. @@ -51,11 +51,25 @@ def __init__(self, account_name, account_key=None, user_delegation_key=None): super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION) self.user_delegation_key = user_delegation_key - def generate_blob(self, container_name, blob_name, snapshot=None, version_id=None, permission=None, - expiry=None, start=None, policy_id=None, ip=None, protocol=None, - cache_control=None, content_disposition=None, - content_encoding=None, content_language=None, - content_type=None, **kwargs): + def generate_blob( + self, container_name: str, + blob_name: str, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + permission: Optional[Union["BlobSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + protocol: Optional[str] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_type: Optional[str] = None, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any + ) -> str: ''' Generates a shared access signature for the blob or one of its snapshots. Use the returned signature with the sas_token parameter of any BlobService. @@ -65,15 +79,21 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non :param str blob_name: Name of blob. :param str snapshot: - The snapshot parameter is an opaque DateTime value that, + The snapshot parameter is an opaque datetime value that, when present, specifies the blob snapshot to grant permission. - :param BlobSasPermissions permission: + :param str version_id: + An optional blob version ID. This parameter is only applicable for versioning-enabled + Storage accounts. Note that the 'versionid' query parameter is not included in the output + SAS. Therefore, please provide the 'version_id' parameter to any APIs when using the output + SAS to operate on a specific version. + :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxytmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. + :type permission: str or BlobSasPermissions :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy @@ -85,9 +105,8 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. 
Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -117,6 +136,12 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :param sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :type sts_hook: Optional[Callable[[str], None]] + :return: A Shared Access Signature (sas) token. + :rtype: str ''' resource_path = container_name + '/' + blob_name @@ -133,30 +158,46 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non sas.add_override_response_headers(cache_control, content_disposition, content_encoding, content_language, content_type) + sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) sas.add_resource_signature(self.account_name, self.account_key, resource_path, user_delegation_key=self.user_delegation_key) + if sts_hook is not None: + sts_hook(sas.string_to_sign) + return sas.get_token() - def generate_container(self, container_name, permission=None, expiry=None, - start=None, policy_id=None, ip=None, protocol=None, - cache_control=None, content_disposition=None, - content_encoding=None, content_language=None, - content_type=None, **kwargs): + def generate_container( + self, container_name: str, + permission: Optional[Union["ContainerSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + protocol: Optional[str] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_type: Optional[str] = None, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any + ) -> str: ''' Generates a shared access signature for the container. Use the returned signature with the sas_token parameter of any BlobService. :param str container_name: Name of container. - :param ContainerSasPermissions permission: + :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxyltfmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. + :type permission: str or ContainerSasPermissions :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy @@ -168,9 +209,8 @@ def generate_container(self, container_name, permission=None, expiry=None, :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. 
The provided datetime will always + be interpreted as UTC. :type start: datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -200,6 +240,12 @@ def generate_container(self, container_name, permission=None, expiry=None, :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :param sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :type sts_hook: Optional[Callable[[str], None]] + :return: A Shared Access Signature (sas) token. + :rtype: str ''' sas = _BlobSharedAccessHelper() sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) @@ -208,9 +254,14 @@ def generate_container(self, container_name, permission=None, expiry=None, sas.add_override_response_headers(cache_control, content_disposition, content_encoding, content_language, content_type) + sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) sas.add_resource_signature(self.account_name, self.account_key, container_name, user_delegation_key=self.user_delegation_key) + + if sts_hook is not None: + sts_hook(sas.string_to_sign) + return sas.get_token() @@ -230,7 +281,6 @@ def get_value_to_append(self, query): return return_value + '\n' def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None): - # pylint: disable = no-member if path[0] != '/': path = '/' + path @@ -271,6 +321,7 @@ def add_resource_signature(self, account_name, account_key, path, user_delegatio self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) + self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) + self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) + + self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + @@ -284,25 +335,29 @@ def add_resource_signature(self, account_name, account_key, path, user_delegatio self._add_query(QueryStringConstants.SIGNED_SIGNATURE, sign_string(account_key if user_delegation_key is None else user_delegation_key.value, string_to_sign)) + self.string_to_sign = string_to_sign - def get_token(self): + def get_token(self) -> str: # a conscious decision was made to exclude the timestamp in the generated token # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP] - return '&'.join(['{0}={1}'.format(n, url_quote(v)) + return '&'.join([f'{n}={url_quote(v)}' for n, v in self.query_dict.items() if v is not None and n not in exclude]) def generate_account_sas( - account_name, # type: str - account_key, # type: str - resource_types, # type: Union[ResourceTypes, str] - permission, # type: Union[AccountSasPermissions, str] - expiry, # type: Optional[Union[datetime, str]] - start=None, # type: Optional[Union[datetime, str]] - ip=None, # type: Optional[str] - **kwargs # type: Any - ): # type: (...) 
-> str + account_name: str, + account_key: str, + resource_types: Union["ResourceTypes", str], + permission: Union["AccountSasPermissions", str], + expiry: Union["datetime", str], + start: Optional[Union["datetime", str]] = None, + ip: Optional[str] = None, + *, + services: Union[Services, str] = Services(blob=True), + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any +) -> str: """Generates a shared access signature for the blob service. Use the returned signature with the credential parameter of any BlobServiceClient, @@ -318,24 +373,16 @@ def generate_account_sas( :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has been - specified in an associated stored access policy. :type permission: str or ~azure.storage.blob.AccountSasPermissions :param expiry: The time at which the shared access signature becomes invalid. - Required unless an id is given referencing a stored access policy - which contains this field. This field must be omitted if it has - been specified in an associated stored access policy. Azure will always - convert values to UTC. If a date is passed in without timezone info, it - is assumed to be UTC. + The provided datetime will always be interpreted as UTC. :type expiry: ~datetime.datetime or str :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: ~datetime.datetime or str :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. @@ -343,8 +390,17 @@ def generate_account_sas( or address range specified on the SAS token, the request is not authenticated. For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. + :keyword Union[Services, str] services: + Specifies the services that the Shared Access Signature (sas) token will be able to be utilized with. + Will default to only this package (i.e. blobs) if not provided. :keyword str protocol: Specifies the protocol permitted for a request made. The default value is https. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :keyword sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :paramtype sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. 
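A usage sketch for the new keyword arguments (shown with the public azure.storage.blob import path, which exposes the same function as this vendored copy; account values are placeholders):

    from datetime import datetime, timedelta, timezone
    from azure.storage.blob import AccountSasPermissions, ResourceTypes, generate_account_sas

    seen = []
    sas = generate_account_sas(
        account_name="myaccount",
        account_key="<base64-encoded account key>",
        resource_types=ResourceTypes(container=True, object=True),
        permission=AccountSasPermissions(read=True, list=True),
        expiry=datetime.now(timezone.utc) + timedelta(hours=1),
        sts_hook=seen.append,  # captures the exact string-to-sign
    )
    # seen[0] now holds the signed payload, which is the first thing to
    # compare against the service error detail when a SAS yields 403s.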
:rtype: str @@ -359,29 +415,31 @@ def generate_account_sas( """ sas = SharedAccessSignature(account_name, account_key) return sas.generate_account( - services=Services(blob=True), + services=services, resource_types=resource_types, permission=permission, expiry=expiry, start=start, ip=ip, + sts_hook=sts_hook, **kwargs - ) # type: ignore + ) def generate_container_sas( - account_name, # type: str - container_name, # type: str - account_key=None, # type: Optional[str] - user_delegation_key=None, # type: Optional[UserDelegationKey] - permission=None, # type: Optional[Union[ContainerSasPermissions, str]] - expiry=None, # type: Optional[Union[datetime, str]] - start=None, # type: Optional[Union[datetime, str]] - policy_id=None, # type: Optional[str] - ip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Any + account_name: str, + container_name: str, + account_key: Optional[str] = None, + user_delegation_key: Optional[UserDelegationKey] = None, + permission: Optional[Union["ContainerSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + *, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any +) -> str: """Generates a shared access signature for a container. Use the returned signature with the credential parameter of any BlobServiceClient, @@ -402,7 +460,7 @@ def generate_container_sas( :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxyltfmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. @@ -418,9 +476,8 @@ def generate_container_sas( :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: ~datetime.datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -449,6 +506,15 @@ def generate_container_sas( :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :keyword str correlation_id: + The correlation id to correlate the storage audit logs with the audit logs used by the principal + generating and distributing the SAS. This can only be used when generating a SAS with delegation key. + :keyword sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :paramtype sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. :rtype: str @@ -461,9 +527,15 @@ def generate_container_sas( :dedent: 12 :caption: Generating a sas token. 
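The validation added in the body below means that, without a stored access policy, omitting expiry or permission now fails fast instead of producing a token the service would reject. For example (public import path shown; values are placeholders):

    from azure.storage.blob import generate_container_sas

    try:
        generate_container_sas(
            account_name="myaccount",
            container_name="mycontainer",
            account_key="<base64-encoded account key>",
            permission="rl",  # no expiry and no policy_id supplied
        )
    except ValueError as e:
        print(e)  # 'expiry' parameter must be provided when not using a stored access policy.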
""" + if not policy_id: + if not expiry: + raise ValueError("'expiry' parameter must be provided when not using a stored access policy.") + if not permission: + raise ValueError("'permission' parameter must be provided when not using a stored access policy.") if not user_delegation_key and not account_key: raise ValueError("Either user_delegation_key or account_key must be provided.") - + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key if user_delegation_key: sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) else: @@ -475,25 +547,27 @@ def generate_container_sas( start=start, policy_id=policy_id, ip=ip, + sts_hook=sts_hook, **kwargs ) def generate_blob_sas( - account_name, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[str] - account_key=None, # type: Optional[str] - user_delegation_key=None, # type: Optional[UserDelegationKey] - permission=None, # type: Optional[Union[BlobSasPermissions, str]] - expiry=None, # type: Optional[Union[datetime, str]] - start=None, # type: Optional[Union[datetime, str]] - policy_id=None, # type: Optional[str] - ip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Any + account_name: str, + container_name: str, + blob_name: str, + snapshot: Optional[str] = None, + account_key: Optional[str] = None, + user_delegation_key: Optional[UserDelegationKey] = None, + permission: Optional[Union["BlobSasPermissions", str]] = None, + expiry: Optional[Union["datetime", str]] = None, + start: Optional[Union["datetime", str]] = None, + policy_id: Optional[str] = None, + ip: Optional[str] = None, + *, + sts_hook: Optional[Callable[[str], None]] = None, + **kwargs: Any +) -> str: """Generates a shared access signature for a blob. Use the returned signature with the credential parameter of any BlobServiceClient, @@ -518,7 +592,7 @@ def generate_blob_sas( :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. - Permissions must be ordered read, write, delete, list. + Permissions must be ordered racwdxytmei. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. @@ -534,9 +608,8 @@ def generate_blob_sas( :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the - storage service receives the request. Azure will always convert values - to UTC. If a date is passed in without timezone info, it is assumed to - be UTC. + storage service receives the request. The provided datetime will always + be interpreted as UTC. :type start: ~datetime.datetime or str :param str policy_id: A unique value up to 64 characters in length that correlates to a @@ -549,7 +622,10 @@ def generate_blob_sas( For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :keyword str version_id: - An optional blob version ID. This parameter is only for versioning enabled account + An optional blob version ID. This parameter is only applicable for versioning-enabled + Storage accounts. Note that the 'versionid' query parameter is not included in the output + SAS. Therefore, please provide the 'version_id' parameter to any APIs when using the output + SAS to operate on a specific version. .. 
versionadded:: 12.4.0 This keyword argument was introduced in API version '2019-12-12'. @@ -570,11 +646,27 @@ def generate_blob_sas( :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :keyword str correlation_id: + The correlation id to correlate the storage audit logs with the audit logs used by the principal + generating and distributing the SAS. This can only be used when generating a SAS with delegation key. + :keyword sts_hook: + For debugging purposes only. If provided, the hook is called with the string to sign + that was used to generate the SAS. + :paramtype sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. :rtype: str """ + if not policy_id: + if not expiry: + raise ValueError("'expiry' parameter must be provided when not using a stored access policy.") + if not permission: + raise ValueError("'permission' parameter must be provided when not using a stored access policy.") if not user_delegation_key and not account_key: raise ValueError("Either user_delegation_key or account_key must be provided.") + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key version_id = kwargs.pop('version_id', None) if version_id and snapshot: raise ValueError("snapshot and version_id cannot be set at the same time.") @@ -592,5 +684,16 @@ def generate_blob_sas( start=start, policy_id=policy_id, ip=ip, + sts_hook=sts_hook, **kwargs ) + +def _is_credential_sastoken(credential: Any) -> bool: + if not credential or not isinstance(credential, str): + return False + + sas_values = QueryStringConstants.to_list() + parsed_query = parse_qs(credential.lstrip("?")) + if parsed_query and all(k in sas_values for k in parsed_query): + return True + return False diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_upload_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_upload_helpers.py index 94313f635e43..2ce55f7ab237 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_upload_helpers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_upload_helpers.py @@ -3,33 +3,39 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=no-self-use from io import SEEK_SET, UnsupportedOperation -from typing import Optional, Union, Any, TypeVar, TYPE_CHECKING # pylint: disable=unused-import +from typing import Any, cast, Dict, IO, Optional, TypeVar, TYPE_CHECKING -import six from azure.core.exceptions import ResourceExistsError, ResourceModifiedError, HttpResponseError -from ._shared.response_handlers import ( - process_storage_error, - return_response_headers) +from ._encryption import ( + _ENCRYPTION_PROTOCOL_V1, + _ENCRYPTION_PROTOCOL_V2, + encrypt_blob, + GCMBlobEncryptionStream, + generate_blob_encryption_data, + get_adjusted_upload_size, + get_blob_encryptor_and_padder +) +from ._generated.models import ( + AppendPositionAccessConditions, + BlockLookupList, + ModifiedAccessConditions +) from ._shared.models import StorageErrorCode +from ._shared.response_handlers import process_storage_error, return_response_headers from ._shared.uploads import ( - upload_data_chunks, - upload_substream_blocks, + AppendBlobChunkUploader, BlockBlobChunkUploader, PageBlobChunkUploader, - AppendBlobChunkUploader) -from ._shared.encryption import generate_blob_encryption_data, encrypt_blob -from ._generated.models import ( - BlockLookupList, - AppendPositionAccessConditions, - ModifiedAccessConditions, + upload_data_chunks, + upload_substream_blocks ) if TYPE_CHECKING: - from datetime import datetime # pylint: disable=unused-import + from ._generated.operations import AppendBlobOperations, BlockBlobOperations, PageBlobOperations + from ._shared.models import StorageConfiguration BlobLeaseClient = TypeVar("BlobLeaseClient") _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 @@ -58,41 +64,46 @@ def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disab ]) -def upload_block_blob( # pylint: disable=too-many-locals - client=None, - data=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): +def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements + client: "BlockBlobOperations", + stream: IO, + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + validate_content: bool, + max_concurrency: Optional[int], + length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' adjusted_count = length if (encryption_options.get('key') is not None) and (adjusted_count is not None): - adjusted_count += (16 - (length % 16)) + adjusted_count = get_adjusted_upload_size(adjusted_count, encryption_options['version']) blob_headers = kwargs.pop('blob_headers', None) tier = kwargs.pop('standard_blob_tier', None) blob_tags_string = kwargs.pop('blob_tags_string', None) + immutability_policy = kwargs.pop('immutability_policy', None) + immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time + immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode + legal_hold = kwargs.pop('legal_hold', None) + progress_hook = kwargs.pop('progress_hook', None) + # Do single put if the size is smaller than or equal config.max_single_put_size if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size): - try: - data 
= data.read(length) - if not isinstance(data, six.binary_type): - raise TypeError('Blob data should be of type bytes.') - except AttributeError: - pass + data = stream.read(length or -1) + if not isinstance(data, bytes): + raise TypeError('Blob data should be of type bytes.') + if encryption_options.get('key'): - encryption_data, data = encrypt_blob(data, encryption_options['key']) + encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version']) headers['x-ms-meta-encryptiondata'] = encryption_data - return client.upload( - body=data, + + response = client.upload( + body=data, # type: ignore [arg-type] content_length=adjusted_count, blob_http_headers=blob_headers, headers=headers, @@ -102,8 +113,16 @@ def upload_block_blob( # pylint: disable=too-many-locals upload_stream_current=0, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, **kwargs) + if progress_hook: + progress_hook(adjusted_count, adjusted_count) + + return cast(Dict[str, Any], response) + use_original_upload_path = blob_settings.use_byte_buffer or \ validate_content or encryption_options.get('required') or \ blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \ @@ -111,20 +130,37 @@ def upload_block_blob( # pylint: disable=too-many-locals not hasattr(stream, 'seek') or not hasattr(stream, 'tell') if use_original_upload_path: - if encryption_options.get('key'): - cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key']) - headers['x-ms-meta-encryptiondata'] = encryption_data - encryption_options['cek'] = cek - encryption_options['vector'] = iv + total_size = length + encryptor, padder = None, None + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_metadata = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_metadata + + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, True) + + # Adjust total_size for encryption V2 + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V2: + total_size = adjusted_count + # V2 wraps the data stream with an encryption stream + if cek is None: + raise ValueError("Generate encryption metadata failed. 
'cek' is None.") + stream = GCMBlobEncryptionStream(cek, stream) # type: ignore [assignment] + block_ids = upload_data_chunks( service=client, uploader_class=BlockBlobChunkUploader, - total_size=length, + total_size=total_size, chunk_size=blob_settings.max_block_size, max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, + encryptor=encryptor, + padder=padder, headers=headers, **kwargs ) @@ -137,13 +173,14 @@ def upload_block_blob( # pylint: disable=too-many-locals max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, + progress_hook=progress_hook, headers=headers, **kwargs ) block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) block_lookup.latest = block_ids - return client.commit_block_list( + return cast(Dict[str, Any], client.commit_block_list( block_lookup, blob_http_headers=blob_headers, cls=return_response_headers, @@ -151,7 +188,10 @@ def upload_block_blob( # pylint: disable=too-many-locals headers=headers, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, - **kwargs) + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs)) except HttpResponseError as error: try: process_storage_error(error) @@ -162,48 +202,63 @@ def upload_block_blob( # pylint: disable=too-many-locals def upload_page_blob( - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "PageBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' if length is None or length < 0: raise ValueError("A content length must be specified for a Page Blob.") if length % 512 != 0: - raise ValueError("Invalid page blob size: {0}. " - "The size must be aligned to a 512-byte boundary.".format(length)) + raise ValueError(f"Invalid page blob size: {length}. 
" + "The size must be aligned to a 512-byte boundary.") + tier = None if kwargs.get('premium_page_blob_tier'): premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') try: - headers['x-ms-access-tier'] = premium_page_blob_tier.value + tier = premium_page_blob_tier.value except AttributeError: - headers['x-ms-access-tier'] = premium_page_blob_tier - if encryption_options and encryption_options.get('data'): - headers['x-ms-meta-encryptiondata'] = encryption_options['data'] + tier = premium_page_blob_tier + + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_data = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_data + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) - response = client.create( + response = cast(Dict[str, Any], client.create( content_length=0, blob_content_length=length, - blob_sequence_number=None, + blob_sequence_number=None, # type: ignore [arg-type] blob_http_headers=kwargs.pop('blob_headers', None), blob_tags_string=blob_tags_string, + tier=tier, cls=return_response_headers, headers=headers, - **kwargs) + **kwargs)) if length == 0: - return response + return cast(Dict[str, Any], response) + + if encryption_options and encryption_options.get('key'): + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, False) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) - return upload_data_chunks( + return cast(Dict[str, Any], upload_data_chunks( service=client, uploader_class=PageBlobChunkUploader, total_size=length, @@ -211,9 +266,9 @@ def upload_page_blob( stream=stream, max_concurrency=max_concurrency, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: try: @@ -225,16 +280,17 @@ def upload_page_blob( def upload_append_blob( # pylint: disable=unused-argument - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "AppendBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if length == 0: return {} @@ -243,6 +299,7 @@ def upload_append_blob( # pylint: disable=unused-argument max_size=kwargs.pop('maxsize_condition', None), append_position=None) blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) try: if overwrite: @@ -252,7 +309,7 @@ def upload_append_blob( # pylint: disable=unused-argument headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return upload_data_chunks( + return cast(Dict[str, Any], upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -261,26 +318,27 @@ def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, append_position_access_conditions=append_conditions, + progress_hook=progress_hook, 
headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: - if error.response.status_code != 404: + if error.response.status_code != 404: # type: ignore [union-attr] raise # rewind the request body if it is a stream if hasattr(stream, 'read'): try: # attempt to rewind the body to the initial position stream.seek(0, SEEK_SET) - except UnsupportedOperation: + except UnsupportedOperation as exc: # if body is not seekable, then retry would not work - raise error + raise error from exc client.create( content_length=0, blob_http_headers=blob_headers, headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return upload_data_chunks( + return cast(Dict[str, Any], upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -289,7 +347,8 @@ def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, append_position_access_conditions=append_conditions, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_version.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_version.py index 8d23bd9195d5..5192aed4c84b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_version.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/_version.py @@ -4,4 +4,4 @@ # license information. # -------------------------------------------------------------------------- -VERSION = "12.7.1" +VERSION = "12.25.0b1" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/__init__.py index 33c10319aaa0..a755e6a2d59b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/__init__.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/__init__.py @@ -3,9 +3,12 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +# pylint: disable=docstring-keyword-should-match-keyword-only import os +from typing import Any, AnyStr, Dict, cast, IO, Iterable, Optional, Union, TYPE_CHECKING +from ._list_blobs_helper import BlobPrefix from .._models import BlobType from .._shared.policies_async import ExponentialRetry, LinearRetry from ._blob_client_async import BlobClient @@ -14,13 +17,17 @@ from ._lease_async import BlobLeaseClient from ._download_async import StorageStreamDownloader +if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential + from azure.core.credentials_async import AsyncTokenCredential + async def upload_blob_to_url( - blob_url, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - credential=None, # type: Any - **kwargs): - # type: (...) 
-> dict[str, Any] + blob_url: str, + data: Union[Iterable[AnyStr], IO[AnyStr]], + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> Dict[str, Any]: """Upload data to a given URL The data will be uploaded as a block blob. @@ -33,10 +40,17 @@ async def upload_blob_to_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob_to_url will overwrite any existing data. If set to False, the @@ -60,24 +74,27 @@ async def upload_blob_to_url( :keyword str encoding: Encoding to use if text is supplied as input. Defaults to UTF-8. :returns: Blob-updated property dict (Etag and last modified) - :rtype: dict(str, Any) + :rtype: dict[str, Any] """ async with BlobClient.from_blob_url(blob_url, credential=credential) as client: - return await client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs) + return await cast(BlobClient, client).upload_blob( + data=data, + blob_type=BlobType.BLOCKBLOB, + **kwargs) +# Download data to specified open file-handle. async def _download_to_stream(client, handle, **kwargs): - """Download data to specified open file-handle.""" stream = await client.download_blob(**kwargs) await stream.readinto(handle) async def download_blob_from_url( - blob_url, # type: str - output, # type: str - credential=None, # type: Any - **kwargs): - # type: (...) -> None + blob_url: str, + output: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any +) -> None: """Download the contents of a blob to a local file or stream. :param str blob_url: @@ -89,10 +106,17 @@ async def download_blob_from_url( :param credential: The credentials with which to authenticate. This is optional if the blob URL already has a SAS token or the blob is public. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. 
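(As an aside on the credential change in this hunk: below is a minimal, hypothetical sketch of the two convenience helpers using the newly documented AzureNamedKeyCredential. The account name, key, and URLs are placeholders, and in this vendored copy the import would come from the package's `_vendor.storage.blob.aio` module rather than `azure.storage.blob.aio`.)

```python
import asyncio
from azure.core.credentials import AzureNamedKeyCredential
from azure.storage.blob.aio import upload_blob_to_url, download_blob_from_url

async def main() -> None:
    url = "https://myaccount.blob.core.windows.net/mycontainer/myblob"
    # Per the docstring above: "name" is the storage account name,
    # "key" is the storage account key. Both values here are placeholders.
    credential = AzureNamedKeyCredential("myaccount", "<account-key>")
    await upload_blob_to_url(url, data=b"hello", credential=credential, overwrite=True)
    await download_blob_from_url(url, "myblob.bin", credential=credential, overwrite=True)

asyncio.run(main())
```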
+ If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials.TokenCredential or + str or dict[str, str] or None :keyword bool overwrite: Whether the local file should be overwritten if it already exists. The default value is `False` - in which case a ValueError will be raised if the file already exists. If set to @@ -123,7 +147,7 @@ async def download_blob_from_url( await _download_to_stream(client, output, **kwargs) else: if not overwrite and os.path.isfile(output): - raise ValueError("The file '{}' already exists.".format(output)) + raise ValueError(f"The file '{output}' already exists.") with open(output, 'wb') as file_handle: await _download_to_stream(client, file_handle, **kwargs) @@ -132,6 +156,7 @@ async def download_blob_from_url( 'upload_blob_to_url', 'download_blob_from_url', 'BlobServiceClient', + 'BlobPrefix', 'ContainerClient', 'BlobClient', 'BlobLeaseClient', diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_client_async.py index d67271a00411..7cb074487f58 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_client_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_client_async.py @@ -3,45 +3,93 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines, invalid-overridden-method +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only + +import warnings +from datetime import datetime from functools import partial -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, +from typing import ( + Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, TYPE_CHECKING ) +from typing_extensions import Self +from azure.core.async_paging import AsyncItemPaged +from azure.core.exceptions import ResourceNotFoundError, HttpResponseError, ResourceExistsError +from azure.core.pipeline import AsyncPipeline +from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.exceptions import ResourceNotFoundError, HttpResponseError -from .._shared.base_client_async import AsyncStorageAccountHostsMixin -from .._shared.policies_async import ExponentialRetry -from .._shared.response_handlers import return_response_headers, process_storage_error -from .._deserialize import get_page_ranges_result, parse_tags, deserialize_pipeline_response_into_cls -from .._serialize import get_modify_conditions, get_api_version, get_access_conditions -from .._generated.aio import AzureBlobStorage -from .._generated.models import CpkInfo -from .._deserialize import deserialize_blob_properties -from .._blob_client import BlobClient as BlobClientBase +from ._download_async import StorageStreamDownloader +from ._lease_async import BlobLeaseClient +from ._models import PageRangePaged from ._upload_helpers import ( - upload_block_blob, upload_append_blob, - upload_page_blob) -from .._models import BlobType, BlobBlock, BlobProperties -from ._lease_async import BlobLeaseClient -from ._download_async import StorageStreamDownloader - + upload_block_blob, + upload_page_blob +) +from .._blob_client import StorageAccountHostsMixin +from .._blob_client_helpers import ( + _abort_copy_options, + _append_block_from_url_options, + _append_block_options, + _clear_page_options, + _commit_block_list_options, + _create_append_blob_options, + _create_page_blob_options, + _create_snapshot_options, + _delete_blob_options, + _download_blob_options, + _format_url, + _from_blob_url, + _get_blob_tags_options, + _get_block_list_result, + _get_page_ranges_options, + _parse_url, + _resize_blob_options, + _seal_append_blob_options, + _set_blob_metadata_options, + _set_blob_tags_options, + _set_http_headers_options, + _set_sequence_number_options, + _stage_block_from_url_options, + _stage_block_options, + _start_copy_from_url_options, + _upload_blob_from_url_options, + _upload_blob_options, + _upload_page_options, + _upload_pages_from_url_options +) +from .._deserialize import ( + deserialize_blob_properties, + deserialize_pipeline_response_into_cls, + get_page_ranges_result, + parse_tags +) +from .._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION +from .._generated.aio import AzureBlobStorage +from .._generated.models import CpkInfo +from .._models import BlobType, BlobBlock, BlobProperties, PageRange +from .._serialize import get_access_conditions, get_api_version, get_modify_conditions, get_version_id +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str +from .._shared.policies_async import 
ExponentialRetry +from .._shared.response_handlers import process_storage_error, return_response_headers if TYPE_CHECKING: - from datetime import datetime - from .._models import ( # pylint: disable=unused-import + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline.policies import AsyncHTTPPolicy + from azure.storage.blob.aio import ContainerClient + from .._models import ( ContentSettings, + ImmutabilityPolicy, PremiumPageBlobTier, - StandardBlobTier, - SequenceNumberAction + SequenceNumberAction, + StandardBlobTier ) -class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disable=too-many-public-methods +class BlobClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin): # type: ignore [misc] # pylint: disable=too-many-public-methods """A client to interact with a specific blob, although that blob may not yet exist. :param str account_url: @@ -58,13 +106,15 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disa :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -83,6 +133,11 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disa the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -101,30 +156,145 @@ class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disa :caption: Creating the BlobClient from a SAS URL to a blob. 
""" def __init__( - self, account_url, # type: str - container_name, # type: str - blob_name, # type: str - snapshot=None, # type: Optional[Union[str, Dict[str, Any]]] - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None + self, account_url: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - super(BlobClient, self).__init__( - account_url, + parsed_url, sas_token, path_snapshot = _parse_url( + account_url=account_url, container_name=container_name, - blob_name=blob_name, - snapshot=snapshot, - credential=credential, - **kwargs) - self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - self._loop = kwargs.get('loop', None) + blob_name=blob_name) + self.container_name = container_name + self.blob_name = blob_name + + if snapshot is not None and hasattr(snapshot, 'snapshot'): + self.snapshot = snapshot.snapshot + elif isinstance(snapshot, dict): + self.snapshot = snapshot['snapshot'] + else: + self.snapshot = snapshot or path_snapshot + self.version_id = kwargs.pop('version_id', None) + + # This parameter is used for the hierarchy traversal. Give precedence to credential. + self._raw_credential = credential if credential else sas_token + self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) + super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) + + def _format_url(self, hostname: str) -> str: + return _format_url( + container_name=self.container_name, + scheme=self.scheme, + blob_name=self.blob_name, + query_str=self._query_str, + hostname=hostname + ) + + @classmethod + def from_blob_url( + cls, blob_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> Self: + """Create BlobClient from a blob url. This doesn't support customized blob url with '/' in blob name. + + :param str blob_url: + The full endpoint URL to the Blob, including SAS token and snapshot if used. This could be + either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. + :type blob_url: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. 
+ If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] # pylint: disable=line-too-long + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. If specified, this will override + the snapshot in the url. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :returns: A Blob client. + :rtype: ~azure.storage.blob.BlobClient + """ + account_url, container_name, blob_name, path_snapshot = _from_blob_url(blob_url=blob_url, snapshot=snapshot) + return cls( + account_url, container_name=container_name, blob_name=blob_name, + snapshot=path_snapshot, credential=credential, **kwargs + ) + + @classmethod + def from_connection_string( + cls, conn_str: str, + container_name: str, + blob_name: str, + snapshot: Optional[Union[str, Dict[str, Any]]] = None, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create BlobClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param container_name: The container name for the blob. + :type container_name: str + :param blob_name: The name of the blob with which to interact. + :type blob_name: str + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] # pylint: disable=line-too-long + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. 
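(Illustrative only: a hypothetical call to the `from_blob_url` classmethod documented above; the SAS URL is a placeholder.)

```python
from azure.storage.blob.aio import BlobClient

# from_blob_url parses the account URL, container name, blob name, and any
# snapshot out of the blob URL; a SAS token in the URL acts as the credential.
client = BlobClient.from_blob_url(
    "https://myaccount.blob.core.windows.net/mycontainer/myblob?<sas-token>"
)
print(client.container_name, client.blob_name)
```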
+ :returns: A Blob client. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string_blob] + :end-before: [END auth_from_connection_string_blob] + :language: python + :dedent: 8 + :caption: Creating the BlobClient from a connection string. + """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls( + account_url, container_name=container_name, blob_name=blob_name, + snapshot=snapshot, credential=credential, **kwargs + ) @distributed_trace_async - async def get_account_information(self, **kwargs): # type: ignore - # type: (Optional[int]) -> Dict[str, str] + async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account in which the blob resides. The information can also be retrieved if the user has a SAS to a container or blob. @@ -134,13 +304,18 @@ async def get_account_information(self, **kwargs): # type: ignore :rtype: dict(str, str) """ try: - return await self._client.blob.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + return cast(Dict[str, str], + await self._client.blob.get_account_info(cls=return_response_headers, **kwargs)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def upload_blob_from_url(self, source_url, **kwargs): - # type: (str, Any) -> Dict[str, Any] + async def upload_blob_from_url( + self, source_url: str, + *, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """ Creates a new Block Blob where the content of the blob is read from a given URL. The content of an existing blob is overwritten with the new blob. @@ -148,15 +323,17 @@ async def upload_blob_from_url(self, source_url, **kwargs): :param str source_url: A URL of up to 2 KB in length that specifies a file or blob. The value should be URL-encoded as it would appear in a request URI. - If the source is in another account, the source must either be public - or must be authenticated via a shared access signature. If the source - is public, no authentication is required. + The source must either be public or must be authenticated via a shared + access signature as part of the url or using the source_authorization keyword. + If the source is public, no authentication is required. Examples: https://myaccount.blob.core.windows.net/mycontainer/myblob https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :keyword dict(str, str) metadata: + Name-value pairs associated with the blob as metadata. :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob will overwrite the existing data. If set to False, the operation will fail with ResourceExistsError. @@ -167,7 +344,7 @@ async def upload_blob_from_url(self, source_url, **kwargs): The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. 
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) :paramtype tags: dict(str, str) :keyword bytearray source_content_md5: Specify the md5 that is used to verify the integrity of the source bytes. @@ -211,7 +388,11 @@ async def upload_blob_from_url(self, source_url, **kwargs): valid, the operation fails with status code 412 (Precondition Failed). :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :keyword ~azure.storage.blob.ContentSettings content_settings: ContentSettings object used to set blob properties. Used to set content type, encoding, language, disposition, md5, and cache control. @@ -228,27 +409,35 @@ async def upload_blob_from_url(self, source_url, **kwargs): :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Response from creating a new block blob for a given URL. + :rtype: Dict[str, Any] """ - options = self._upload_blob_from_url_options( - source_url=self._encode_source_url(source_url), + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_from_url_options( + source_url=source_url, + metadata=metadata, **kwargs) try: - return await self._client.block_blob.put_blob_from_url(**options) + return cast(Dict[str, Any], await self._client.block_blob.put_blob_from_url(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async async def upload_blob( - self, data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> Any + self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], + blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, + length: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new blob from a data source with automatic chunking. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -262,7 +451,7 @@ async def upload_blob( The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters.
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -317,6 +506,20 @@ async def upload_blob( A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on premium storage accounts. + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: A standard blob tier value to set the blob to. For this version of the library, this is only applicable to block blobs on standard storage accounts. @@ -327,8 +530,9 @@ async def upload_blob( value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). :keyword int max_concurrency: - Maximum number of parallel connections to use when the blob size exceeds - 64MB. + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: Encrypts the data on the service-side with the given key. Use of customer-provided keys must be done over HTTPS. @@ -344,8 +548,18 @@ async def upload_blob( :keyword str encoding: Defaults to UTF-8. + :keyword progress_hook: + An async callback to track the progress of a long running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. multiple calls to the Azure service and the timeout will apply to each call individually. :returns: Blob-updated property dict (Etag and last modified) @@ -360,24 +574,62 @@ async def upload_blob( :dedent: 16 :caption: Upload a blob to the container.
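(A hedged sketch of the new `progress_hook` keyword documented in this hunk; the client construction is assumed and the connection string, container, and blob names are placeholders.)

```python
from typing import Optional
from azure.storage.blob.aio import BlobClient

async def on_progress(current: int, total: Optional[int]) -> None:
    # Matches the documented signature: bytes transferred so far, and the
    # total size of the blob (or None when the size is unknown).
    print(f"uploaded {current} of {total} bytes")

async def upload_with_progress(conn_str: str) -> None:
    async with BlobClient.from_connection_string(conn_str, "mycontainer", "myblob") as client:
        await client.upload_blob(
            b"0" * (8 * 1024 * 1024),  # 8 MiB of placeholder data
            overwrite=True,
            progress_hook=on_progress,
        )
```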
""" - options = self._upload_blob_options( - data, + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _upload_blob_options( + data=data, blob_type=blob_type, length=length, metadata=metadata, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) if blob_type == BlobType.BlockBlob: - return await upload_block_blob(**options) + return cast(Dict[str, Any], await upload_block_blob(**options)) if blob_type == BlobType.PageBlob: - return await upload_page_blob(**options) - return await upload_append_blob(**options) + return cast(Dict[str, Any], await upload_page_blob(**options)) + return cast(Dict[str, Any], await upload_append_blob(**options)) + + @overload + async def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + async def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... @distributed_trace_async - async def download_blob(self, offset=None, length=None, **kwargs): - # type: (Optional[int], Optional[int], Any) -> StorageStreamDownloader + async def download_blob( + self, offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. The readall() method must be used to read all the content or readinto() must be used to download the blob into - a stream. + a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks. :param int offset: Start of byte range to use for downloading a section of the blob. @@ -390,6 +642,7 @@ async def download_blob(self, offset=None, length=None, **kwargs): value that, when present, specifies the version of the blob to download. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword bool validate_content: @@ -435,11 +688,23 @@ async def download_blob(self, offset=None, length=None, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int max_concurrency: - The number of parallel connections with which to download. + Maximum number of parallel connections to use when transferring the blob in chunks. + This option does not affect the underlying connection pool, and may + require a separate configuration of the connection pool. :keyword str encoding: Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + An async callback to track the progress of a long running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], Awaitable[None]] :keyword int timeout: - The timeout parameter is expressed in seconds. 
This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. multiple calls to the Azure service and the timeout will apply to each call individually. :returns: A streaming object (StorageStreamDownloader) @@ -454,17 +719,35 @@ async def download_blob(self, offset=None, length=None, **kwargs): :dedent: 16 :caption: Download a blob. """ - options = self._download_blob_options( + if self.require_encryption and not (self.key_encryption_key or self.key_resolver_function): + raise ValueError("Encryption required but no key was provided.") + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _download_blob_options( + blob_name=self.blob_name, + container_name=self.container_name, + version_id=get_version_id(self.version_id, kwargs), offset=offset, length=length, + encoding=encoding, + encryption_options={ + 'required': self.require_encryption, + 'version': self.encryption_version, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function + }, + config=self._config, + sdk_moniker=self._sdk_moniker, + client=self._client, **kwargs) downloader = StorageStreamDownloader(**options) await downloader._setup() # pylint: disable=protected-access return downloader @distributed_trace_async - async def delete_blob(self, delete_snapshots=False, **kwargs): - # type: (str, Any) -> None + async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None: """Marks the specified blob for deletion. The blob is later deleted during garbage collection. @@ -487,6 +770,7 @@ async def delete_blob(self, delete_snapshots=False, **kwargs): value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -518,7 +802,11 @@ async def delete_blob(self, delete_snapshots=False, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -530,22 +818,33 @@ async def delete_blob(self, delete_snapshots=False, **kwargs): :dedent: 16 :caption: Delete a blob. """ - options = self._delete_blob_options(delete_snapshots=delete_snapshots, **kwargs) + options = _delete_blob_options( + snapshot=self.snapshot, + version_id=get_version_id(self.version_id, kwargs), + delete_snapshots=delete_snapshots, + **kwargs) try: await self._client.blob.delete(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def undelete_blob(self, **kwargs): - # type: (Any) -> None + async def undelete_blob(self, **kwargs: Any) -> None: """Restores soft-deleted blobs or snapshots. Operation will only be successful if used within the specified number of days set in the delete retention policy.
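(A short sketch tying together the download and delete changes above: the new `encoding` overloads type the downloader, and `delete_snapshots` now defaults to None. The `client` is an assumed BlobClient instance.)

```python
from azure.storage.blob.aio import BlobClient

async def read_then_delete(client: BlobClient) -> str:
    # encoding="utf-8" selects the StorageStreamDownloader[str] overload;
    # omitting encoding yields StorageStreamDownloader[bytes] instead.
    downloader = await client.download_blob(encoding="utf-8")
    text = await downloader.readall()
    # delete_snapshots="include" removes the blob and all of its snapshots.
    await client.delete_blob(delete_snapshots="include")
    return text
```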
Operation will only be successful if used within the specified number of days set in the delete retention policy. + If blob versioning is enabled, the base blob cannot be restored using this + method. Instead use :func:`start_copy_from_url` with the URL of the blob version + you wish to promote to the current version. + :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :rtype: None .. admonition:: Example: @@ -563,24 +862,33 @@ async def undelete_blob(self, **kwargs): process_storage_error(error) @distributed_trace_async - async def exists(self, **kwargs): - # type: (**Any) -> bool + async def exists(self, **kwargs: Any) -> bool: """ Returns True if a blob exists with the defined parameters, and returns False otherwise. - :param str version_id: + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to check if it exists. - :param int timeout: - The timeout parameter is expressed in seconds. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: boolean + :rtype: bool """ + version_id = get_version_id(self.version_id, kwargs) try: await self._client.blob.get_properties( snapshot=self.snapshot, + version_id=version_id, **kwargs) return True + # Encrypted with CPK + except ResourceExistsError: + return True except HttpResponseError as error: try: process_storage_error(error) @@ -588,8 +896,7 @@ async def exists(self, **kwargs): return False @distributed_trace_async - async def get_blob_properties(self, **kwargs): - # type: (Any) -> BlobProperties + async def get_blob_properties(self, **kwargs: Any) -> BlobProperties: """Returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -602,6 +909,7 @@ async def get_blob_properties(self, **kwargs): value that, when present, specifies the version of the blob to get properties. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword ~datetime.datetime if_modified_since: @@ -633,7 +941,11 @@ async def get_blob_properties(self, **kwargs): As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. 
:returns: BlobProperties :rtype: ~azure.storage.blob.BlobProperties @@ -648,6 +960,7 @@ async def get_blob_properties(self, **kwargs): """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: @@ -661,7 +974,7 @@ async def get_blob_properties(self, **kwargs): kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) blob_props = await self._client.blob.get_properties( timeout=kwargs.pop('timeout', None), - version_id=kwargs.pop('version_id', None), + version_id=version_id, snapshot=self.snapshot, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, @@ -674,11 +987,13 @@ async def get_blob_properties(self, **kwargs): if isinstance(blob_props, BlobProperties): blob_props.container = self.container_name blob_props.snapshot = self.snapshot - return blob_props # type: ignore + return cast(BlobProperties, blob_props) @distributed_trace_async - async def set_http_headers(self, content_settings=None, **kwargs): - # type: (Optional[ContentSettings], Any) -> None + async def set_http_headers( + self, content_settings: Optional["ContentSettings"] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Sets system properties on the blob. If one property is set for the content_settings, all properties will be overridden. @@ -714,19 +1029,25 @@ async def set_http_headers(self, content_settings=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: Blob-updated property dict (Etag and last modified) :rtype: Dict[str, Any] """ - options = self._set_http_headers_options(content_settings=content_settings, **kwargs) + options = _set_http_headers_options(content_settings=content_settings, **kwargs) try: - return await self._client.blob.set_http_headers(**options) # type: ignore + return cast(Dict[str, Any], await self._client.blob.set_http_headers(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def set_blob_metadata(self, metadata=None, **kwargs): - # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] + async def set_blob_metadata( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets user-defined metadata for the blob as one or more name-value pairs. :param metadata: @@ -775,24 +1096,114 @@ async def set_blob_metadata(self, metadata=None, **kwargs): .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
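(A sketch of the two property setters above; `client` is an assumed BlobClient and the values are placeholders.)

```python
from azure.storage.blob import ContentSettings
from azure.storage.blob.aio import BlobClient

async def set_props(client: BlobClient) -> None:
    # set_http_headers overrides all HTTP properties at once, so populate
    # every field you care about on the ContentSettings object.
    await client.set_http_headers(ContentSettings(content_type="application/json"))
    # Metadata is replaced wholesale with the supplied name-value pairs.
    await client.set_blob_metadata({"env": "dev", "owner": "data-team"})
```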
:returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Union[str, datetime]] """ - options = self._set_blob_metadata_options(metadata=metadata, **kwargs) + if kwargs.get('cpk') and self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + options = _set_blob_metadata_options(metadata=metadata, **kwargs) try: - return await self._client.blob.set_metadata(**options) # type: ignore + return cast(Dict[str, Union[str, datetime]], await self._client.blob.set_metadata(**options)) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def create_page_blob( # type: ignore - self, size, # type: int - content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] - premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] - **kwargs - ): - # type: (...) -> Dict[str, Union[str, datetime]] + async def set_immutability_policy( + self, immutability_policy: "ImmutabilityPolicy", + **kwargs: Any + ) -> Dict[str, str]: + """The Set Immutability Policy operation sets the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to check if it exists. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: Key value pairs of blob tags. + :rtype: Dict[str, str] + """ + + version_id = get_version_id(self.version_id, kwargs) + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + return cast(Dict[str, str], await self._client.blob.set_immutability_policy( + cls=return_response_headers,version_id=version_id, **kwargs)) + + @distributed_trace_async + async def delete_immutability_policy(self, **kwargs: Any) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to check if it exists. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: Key value pairs of blob tags.
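(A hedged sketch of the immutability APIs added in this hunk, assuming API version 2020-10-02 or later and an assumed `client`; the seven-day window is a placeholder, and in this vendored copy ImmutabilityPolicy would come from the vendored models rather than `azure.storage.blob`.)

```python
from datetime import datetime, timedelta, timezone
from azure.storage.blob import ImmutabilityPolicy
from azure.storage.blob.aio import BlobClient

async def protect_blob(client: BlobClient) -> None:
    policy = ImmutabilityPolicy(
        expiry_time=datetime.now(timezone.utc) + timedelta(days=7),
        policy_mode="Unlocked",  # an unlocked policy can still be shortened or deleted
    )
    await client.set_immutability_policy(policy)
    # An unlocked policy can later be removed again:
    await client.delete_immutability_policy()
```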
+ :rtype: Dict[str, str] + """ + + version_id = get_version_id(self.version_id, kwargs) + await self._client.blob.delete_immutability_policy(version_id=version_id, **kwargs) + + @distributed_trace_async + async def set_legal_hold(self, legal_hold: bool, **kwargs: Any) -> Dict[str, Union[str, datetime, bool]]: + """The Set Legal Hold operation sets a legal hold on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param bool legal_hold: + Specified if a legal hold should be set on the blob. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to check if it exists. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: Key value pairs of blob tags. + :rtype: Dict[str, Union[str, datetime, bool]] + """ + + version_id = get_version_id(self.version_id, kwargs) + return cast(Dict[str, Union[str, datetime, bool]], await self._client.blob.set_legal_hold( + legal_hold, version_id=version_id, cls=return_response_headers, **kwargs)) + + @distributed_trace_async + async def create_page_blob( + self, size: int, + content_settings: Optional["ContentSettings"] = None, + metadata: Optional[Dict[str, str]] = None, + premium_page_blob_tier: Optional[Union[str, "PremiumPageBlobTier"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Creates a new Page Blob of the specified size. :param int size: @@ -813,7 +1224,7 @@ async def create_page_blob( # type: ignore The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, and tag values must be between 0 and 256 characters. Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), - space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) .. versionadded:: 12.4.0 @@ -826,6 +1237,18 @@ async def create_page_blob( # type: ignore Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -857,25 +1280,38 @@ async def create_page_blob( # type: ignore .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
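(A sketch of `set_legal_hold` and the 512-byte size rule for page blobs that this patch also touches in `upload_page_blob`; `client` is assumed and the returned header key is an assumption based on the REST response.)

```python
from azure.storage.blob.aio import BlobClient

async def hold_and_create(client: BlobClient) -> None:
    # set_legal_hold returns response headers via return_response_headers.
    result = await client.set_legal_hold(True)
    print(result.get("legal_hold"))
    # Page blobs must be sized in 512-byte multiples: 4 MiB % 512 == 0.
    await client.create_page_blob(size=4 * 1024 * 1024)
```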
:returns: Blob-updated property dict (Etag and last modified).
:rtype: dict[str, Any]
"""
- options = self._create_page_blob_options(
- size,
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _create_page_blob_options(
+ size=size,
content_settings=content_settings,
metadata=metadata,
premium_page_blob_tier=premium_page_blob_tier,
**kwargs)
try:
- return await self._client.page_blob.create(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.page_blob.create(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def create_append_blob(self, content_settings=None, metadata=None, **kwargs):
- # type: (Optional[ContentSettings], Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]]
- """Creates a new Append Blob.
+ async def create_append_blob(
+ self, content_settings: Optional["ContentSettings"] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime]]:
+ """Creates a new Append Blob. This operation creates a new 0-length append blob. The content
+ of any existing blob is overwritten with the newly initialized append blob. To add content to
+ the append blob, call the :func:`append_block` or :func:`append_block_from_url` method.
:param ~azure.storage.blob.ContentSettings content_settings:
ContentSettings object used to set blob properties. Used to set content type, encoding,
@@ -888,11 +1324,23 @@ async def create_append_blob(self, content_settings=None, metadata=None, **kwarg
The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
and tag values must be between 0 and 256 characters.
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
+ space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
.. versionadded:: 12.4.0
:paramtype tags: dict(str, str)
+ :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
+ Specifies the immutability policy of a blob, blob snapshot or blob version.
+
+ .. versionadded:: 12.10.0
+ This was introduced in API version '2020-10-02'.
+
+ :keyword bool legal_hold:
+ Specifies if a legal hold should be set on the blob.
+
+ .. versionadded:: 12.10.0
+ This was introduced in API version '2020-10-02'.
+
:keyword lease:
Required if the blob has an active lease. Value can be a BlobLeaseClient object
or the lease ID as a string.
@@ -928,22 +1376,32 @@ async def create_append_blob(self, content_settings=None, metadata=None, **kwarg
.. versionadded:: 12.2.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag and last modified).
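A quick sketch of the page blob creation path above (hypothetical names, public SDK surface; page blobs have a fixed size and every write must be 512-byte aligned)::

    from azure.storage.blob.aio import BlobClient

    async def write_first_page(blob: BlobClient) -> None:
        # The size must be a multiple of 512 bytes; the blob starts zero-filled.
        await blob.create_page_blob(size=1024 * 1024)
        # Offset and length must also be multiples of 512.
        await blob.upload_page(b"\xff" * 512, offset=0, length=512)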
:rtype: dict[str, Any]
"""
- options = self._create_append_blob_options(
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _create_append_blob_options(
content_settings=content_settings,
metadata=metadata,
**kwargs)
try:
- return await self._client.append_blob.create(**options) # type: ignore
+ return cast(Dict[str, Union[str, datetime]], await self._client.append_blob.create(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def create_snapshot(self, metadata=None, **kwargs):
- # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]]
+ async def create_snapshot(
+ self, metadata: Optional[Dict[str, str]] = None,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime]]:
"""Creates a snapshot of the blob.
A snapshot is a read-only version of a blob that's taken at a point in time.
@@ -998,7 +1456,11 @@ async def create_snapshot(self, metadata=None, **kwargs):
.. versionadded:: 12.2.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Snapshot ID, Etag, and last modified).
:rtype: dict[str, Any]
@@ -1011,20 +1473,29 @@ async def create_snapshot(self, metadata=None, **kwargs):
:dedent: 12
:caption: Create a snapshot of the blob.
"""
- options = self._create_snapshot_options(metadata=metadata, **kwargs)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _create_snapshot_options(metadata=metadata, **kwargs)
try:
- return await self._client.blob.create_snapshot(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.blob.create_snapshot(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs):
- # type: (str, Optional[Dict[str, str]], bool, Any) -> Any
- """Copies a blob asynchronously.
-
- This operation returns a copy operation
- object that can be used to wait on the completion of the operation,
- as well as check status or abort the copy operation.
+ async def start_copy_from_url(
+ self, source_url: str,
+ metadata: Optional[Dict[str, str]] = None,
+ incremental_copy: bool = False,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime]]:
+ """Copies a blob from the given URL.
+
+ This operation returns a dictionary containing `copy_status` and `copy_id`,
+ which can be used to check the status of or abort the copy operation.
+ `copy_status` will be 'success' if the copy completed synchronously or
+ 'pending' if the copy has been started asynchronously. For asynchronous copies,
+ the status can be checked by polling the :func:`get_blob_properties` method and
+ checking the copy status. Set `requires_sync` to True to force the copy to be synchronous.
The Blob service copies blobs on a best-effort basis.
The source blob for a copy operation may be a block blob, an append blob,
@@ -1047,10 +1518,6 @@ async def start_copy_from_url(self, source_url, metadata=None, incremental_copy=
end of the copy operation, the destination blob will have the same committed
block count as the source.
- For all blob types, you can call status() on the returned polling object
- to check the status of the copy operation, or wait() to block until the
- operation is complete. The final blob will be committed when the copy completes.
-
:param str source_url:
A URL of up to 2 KB in length that specifies a file or blob.
The value should be URL-encoded as it would appear in a request URI.
@@ -1081,11 +1548,26 @@ async def start_copy_from_url(self, source_url, metadata=None, incremental_copy=
The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
and tag values must be between 0 and 256 characters.
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
+ space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_).
+
+ The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob.
+ This option is only available when `incremental_copy=False` and `requires_sync=True`.
.. versionadded:: 12.4.0
- :paramtype tags: dict(str, str)
+ :paramtype tags: dict(str, str) or Literal["COPY"]
+ :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
+ Specifies the immutability policy of a blob, blob snapshot or blob version.
+
+ .. versionadded:: 12.10.0
+ This was introduced in API version '2020-10-02'.
+
+ :keyword bool legal_hold:
+ Specifies if a legal hold should be set on the blob.
+
+ .. versionadded:: 12.10.0
+ This was introduced in API version '2020-10-02'.
+
:keyword ~datetime.datetime source_if_modified_since:
A DateTime value. Azure expects the date value passed in to be UTC.
If timezone is included, any non-UTC datetimes will be converted to UTC.
@@ -1140,7 +1622,11 @@ async def start_copy_from_url(self, source_url, metadata=None, incremental_copy=
the lease ID given matches the active lease ID of the source blob.
:paramtype source_lease: ~azure.storage.blob.aio.BlobLeaseClient or str
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
A page blob tier value to set the blob to. The tier correlates to the size of the
blob and number of allowed IOPS. This is only applicable to page blobs on
@@ -1157,8 +1643,23 @@
:keyword bool requires_sync:
Enforces that the service will not return a response until the copy is complete.
+ :keyword str source_authorization:
+ Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
+ the prefix of the source_authorization string. This option is only available when `incremental_copy` is
+ set to False and `requires_sync` is set to True.
+
+ .. versionadded:: 12.9.0
+
+ :keyword str encryption_scope:
+ A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption
+ scope can be created using the Management API and referenced here by name. If a default
+ encryption scope has been defined at the container, this value will override it if the
+ container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+ .. versionadded:: 12.10.0
+
:returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status).
- :rtype: dict[str, str or ~datetime.datetime]
+ :rtype: dict[str, Union[str, ~datetime.datetime]]
.. admonition:: Example:
@@ -1169,21 +1670,23 @@
:dedent: 16
:caption: Copy a blob from a URL.
"""
- options = self._start_copy_from_url_options(
- source_url=self._encode_source_url(source_url),
+ options = _start_copy_from_url_options(
+ source_url=source_url,
metadata=metadata,
incremental_copy=incremental_copy,
**kwargs)
try:
if incremental_copy:
- return await self._client.page_blob.copy_incremental(**options)
- return await self._client.blob.start_copy_from_url(**options)
+ return cast(Dict[str, Union[str, datetime]], await self._client.page_blob.copy_incremental(**options))
+ return cast(Dict[str, Union[str, datetime]], await self._client.blob.start_copy_from_url(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def abort_copy(self, copy_id, **kwargs):
- # type: (Union[str, Dict[str, Any], BlobProperties], Any) -> None
+ async def abort_copy(
+ self, copy_id: Union[str, Dict[str, Any], BlobProperties],
+ **kwargs: Any
+ ) -> None:
"""Abort an ongoing copy operation.
This will leave a destination blob with zero length and full metadata.
@@ -1204,15 +1707,18 @@ async def abort_copy(self, copy_id, **kwargs):
:dedent: 16
:caption: Abort copying a blob from URL.
"""
- options = self._abort_copy_options(copy_id, **kwargs)
+ options = _abort_copy_options(copy_id, **kwargs)
try:
await self._client.blob.abort_copy_from_url(**options)
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs):
- # type: (int, Optional[str], Any) -> BlobLeaseClient
+ async def acquire_lease(
+ self, lease_duration: int = -1,
+ lease_id: Optional[str] = None,
+ **kwargs: Any
+ ) -> BlobLeaseClient:
"""Requests a new lease.
If the blob does not have an active lease, the Blob
@@ -1251,7 +1757,11 @@ async def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs):
.. versionadded:: 12.4.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: A BlobLeaseClient object.
:rtype: ~azure.storage.blob.aio.BlobLeaseClient
@@ -1264,13 +1774,12 @@
:dedent: 12
:caption: Acquiring a lease on a blob.
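Since the revised docstring above drops the old polling-object description, here is a sketch of the new status-polling pattern (hypothetical helper, public SDK surface; `copy_status` and `copy_id` come from the returned dictionary, and the copy state is surfaced on the destination's properties)::

    import asyncio
    from azure.storage.blob.aio import BlobClient

    async def copy_blob(dest: BlobClient, source_url: str) -> str:
        copy = await dest.start_copy_from_url(source_url)
        # 'success' means the copy completed synchronously.
        while copy["copy_status"] == "pending":
            await asyncio.sleep(5)
            props = await dest.get_blob_properties()
            copy["copy_status"] = props.copy.status
        return copy["copy_status"]  # 'success', 'failed', or 'aborted'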
""" - lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + lease = BlobLeaseClient(self, lease_id=lease_id) await lease.acquire(lease_duration=lease_duration, **kwargs) return lease @distributed_trace_async - async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): - # type: (Union[str, StandardBlobTier], Any) -> None + async def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: """This operation sets the tier on a block blob. A block blob's tier determines Hot/Cool/Archive storage type. @@ -1293,7 +1802,11 @@ async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword lease: Required if the blob has an active lease. Value can be a BlobLeaseClient object or the lease ID as a string. @@ -1302,6 +1815,7 @@ async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): """ access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) + version_id = get_version_id(self.version_id, kwargs) if standard_blob_tier is None: raise ValueError("A StandardBlobTier must be specified") try: @@ -1310,24 +1824,25 @@ async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, lease_access_conditions=access_conditions, + version_id=version_id, **kwargs) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async async def stage_block( - self, block_id, # type: str - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - length=None, # type: Optional[int] - **kwargs - ): - # type: (...) -> None + self, block_id: str, + data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + length: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: """Creates a new block to be committed as part of a blob. :param str block_id: A string value that identifies the block. The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage @@ -1358,29 +1873,37 @@ async def stage_block( .. versionadded:: 12.2.0 :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: Blob property dict. 
+ :rtype: Dict[str, Any]
"""
- options = self._stage_block_options(
- block_id,
- data,
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _stage_block_options(
+ block_id=block_id,
+ data=data,
length=length,
**kwargs)
try:
- return await self._client.block_blob.stage_block(**options)
+ return cast(Dict[str, Any], await self._client.block_blob.stage_block(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
async def stage_block_from_url(
- self, block_id, # type: str
- source_url, # type: str
- source_offset=None, # type: Optional[int]
- source_length=None, # type: Optional[int]
- source_content_md5=None, # type: Optional[Union[bytes, bytearray]]
- **kwargs
- ):
- # type: (...) -> None
+ self, block_id: str,
+ source_url: str,
+ source_offset: Optional[int] = None,
+ source_length: Optional[int] = None,
+ source_content_md5: Optional[Union[bytes, bytearray]] = None,
+ **kwargs: Any
+ ) -> Dict[str, Any]:
"""Creates a new block to be committed as part of a blob where
the contents are read from a URL.
@@ -1413,24 +1936,36 @@
.. versionadded:: 12.2.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
- :rtype: None
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
+ :keyword str source_authorization:
+ Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
+ the prefix of the source_authorization string.
+ :returns: Blob property dict.
+ :rtype: Dict[str, Any]
"""
- options = self._stage_block_from_url_options(
- block_id,
- source_url=self._encode_source_url(source_url),
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _stage_block_from_url_options(
+ block_id=block_id,
+ source_url=source_url,
source_offset=source_offset,
source_length=source_length,
source_content_md5=source_content_md5,
**kwargs)
try:
- return await self._client.block_blob.stage_block_from_url(**options)
+ return cast(Dict[str, Any], await self._client.block_blob.stage_block_from_url(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def get_block_list(self, block_list_type="committed", **kwargs):
- # type: (Optional[str], Any) -> Tuple[List[BlobBlock], List[BlobBlock]]
+ async def get_block_list(
+ self, block_list_type: str = "committed",
+ **kwargs: Any
+ ) -> Tuple[List[BlobBlock], List[BlobBlock]]:
"""The Get Block List operation retrieves the list of blocks that have
been uploaded as part of a block blob.
@@ -1449,9 +1984,13 @@
.. versionadded:: 12.4.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client.
To configure client-side network timeouts
+ see `here `__.
:returns: A tuple of two lists - committed and uncommitted blocks
- :rtype: tuple(list(~azure.storage.blob.BlobBlock), list(~azure.storage.blob.BlobBlock))
+ :rtype: Tuple[List[BlobBlock], List[BlobBlock]]
"""
access_conditions = get_access_conditions(kwargs.pop('lease', None))
mod_conditions = get_modify_conditions(kwargs)
@@ -1465,16 +2004,15 @@
**kwargs)
except HttpResponseError as error:
process_storage_error(error)
- return self._get_block_list_result(blocks)
+ return _get_block_list_result(blocks)
@distributed_trace_async
- async def commit_block_list( # type: ignore
- self, block_list, # type: List[BlobBlock]
- content_settings=None, # type: Optional[ContentSettings]
- metadata=None, # type: Optional[Dict[str, str]]
- **kwargs
- ):
- # type: (...) -> Dict[str, Union[str, datetime]]
+ async def commit_block_list(
+ self, block_list: List[BlobBlock],
+ content_settings: Optional["ContentSettings"] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime]]:
"""The Commit Block List operation writes a blob by specifying the list of
block IDs that make up the blob.
@@ -1491,7 +2029,7 @@
The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
and tag values must be between 0 and 256 characters.
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
+ space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
.. versionadded:: 12.4.0
@@ -1500,6 +2038,18 @@
Required if the blob has an active lease. Value can be a BlobLeaseClient object
or the lease ID as a string.
:paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
+ :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
+ Specifies the immutability policy of a blob, blob snapshot or blob version.
+
+ .. versionadded:: 12.10.0
+ This was introduced in API version '2020-10-02'.
+
+ :keyword bool legal_hold:
+ Specifies if a legal hold should be set on the blob.
+
+ .. versionadded:: 12.10.0
+ This was introduced in API version '2020-10-02'.
+
:keyword bool validate_content:
If true, calculates an MD5 hash of the page content. The storage
service checks the hash of the content that has arrived
@@ -1547,23 +2097,30 @@
.. versionadded:: 12.2.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag and last modified).
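To make the stage/commit relationship above concrete, a minimal sketch (hypothetical names, public SDK surface): staged blocks remain invisible until the block list is committed::

    from typing import List
    from uuid import uuid4

    from azure.storage.blob import BlobBlock
    from azure.storage.blob.aio import BlobClient

    async def upload_in_blocks(blob: BlobClient, chunks: List[bytes]) -> None:
        block_ids = []
        for chunk in chunks:
            # Block IDs must all be the same length for a given blob.
            block_id = uuid4().hex
            await blob.stage_block(block_id, chunk)
            block_ids.append(block_id)
        # Nothing is readable until the staged blocks are committed.
        await blob.commit_block_list([BlobBlock(block_id=b) for b in block_ids])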
:rtype: dict(str, Any)
"""
- options = self._commit_block_list_options(
- block_list,
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _commit_block_list_options(
+ block_list=block_list,
content_settings=content_settings,
metadata=metadata,
**kwargs)
try:
- return await self._client.block_blob.commit_block_list(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.block_blob.commit_block_list(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs):
- # type: (Union[str, PremiumPageBlobTier], **Any) -> None
+ async def set_premium_page_blob_tier(self, premium_page_blob_tier: "PremiumPageBlobTier", **kwargs: Any) -> None:
"""Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts.
:param premium_page_blob_tier:
@@ -1578,9 +2135,11 @@
.. versionadded:: 12.4.0
:keyword int timeout:
- The timeout parameter is expressed in seconds. This method may make
- multiple calls to the Azure service and the timeout will apply to
- each call individually.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:keyword lease:
Required if the blob has an active lease. Value can be a BlobLeaseClient object
or the lease ID as a string.
@@ -1602,8 +2161,7 @@
process_storage_error(error)
@distributed_trace_async
- async def set_blob_tags(self, tags=None, **kwargs):
- # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any]
+ async def set_blob_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]:
"""The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot.
Each call to this operation replaces all existing tags attached to the blob. To remove all
tags from the blob, call this operation with no tags set.
@@ -1616,7 +2174,7 @@
The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
and tag values must be between 0 and 256 characters.
Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
- space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
+ space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
:type tags: dict(str, str)
:keyword str version_id:
The version id parameter is an opaque DateTime
@@ -1636,19 +2194,23 @@
or the lease ID as a string.
:paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag and last modified)
:rtype: Dict[str, Any]
"""
- options = self._set_blob_tags_options(tags=tags, **kwargs)
+ version_id = get_version_id(self.version_id, kwargs)
+ options = _set_blob_tags_options(version_id=version_id, tags=tags, **kwargs)
try:
- return await self._client.blob.set_tags(**options)
+ return cast(Dict[str, Any], await self._client.blob.set_tags(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def get_blob_tags(self, **kwargs):
- # type: (**Any) -> Dict[str, str]
+ async def get_blob_tags(self, **kwargs: Any) -> Dict[str, str]:
"""The Get Tags operation enables users to get tags on a blob or specific blob version, but not snapshot.
.. versionadded:: 12.4.0
@@ -1665,26 +2227,30 @@
or the lease ID as a string.
:paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Key value pairs of blob tags.
:rtype: Dict[str, str]
"""
- options = self._get_blob_tags_options(**kwargs)
+ version_id = get_version_id(self.version_id, kwargs)
+ options = _get_blob_tags_options(version_id=version_id, snapshot=self.snapshot, **kwargs)
try:
_, tags = await self._client.blob.get_tags(**options)
- return parse_tags(tags) # pylint: disable=protected-access
+ return cast(Dict[str, str], parse_tags(tags))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def get_page_ranges( # type: ignore
- self, offset=None, # type: Optional[int]
- length=None, # type: Optional[int]
- previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]]
- **kwargs
- ):
- # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
- """Returns the list of valid page ranges for a Page Blob or snapshot
+ async def get_page_ranges(
+ self, offset: Optional[int] = None,
+ length: Optional[int] = None,
+ previous_snapshot_diff: Optional[Union[str, Dict[str, Any]]] = None,
+ **kwargs: Any
+ ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
+ """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot
of a page blob.
:param int offset:
@@ -1733,13 +2299,23 @@
.. versionadded:: 12.4.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
The first element is the list of filled page ranges, the second element is the list of cleared page ranges.
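A short sketch of the tag round-trip above (hypothetical names, public SDK surface; note that each set_blob_tags call replaces the blob's entire tag set)::

    from azure.storage.blob.aio import BlobClient

    async def tag_and_read(blob: BlobClient) -> None:
        await blob.set_blob_tags({"project": "demo", "stage": "raw"})
        tags = await blob.get_blob_tags()
        assert tags == {"project": "demo", "stage": "raw"}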
:rtype: tuple(list(dict(str, str)), list(dict(str, str)))
"""
- options = self._get_page_ranges_options(
+ warnings.warn(
+ "get_page_ranges is deprecated, use list_page_ranges instead",
+ DeprecationWarning
+ )
+
+ options = _get_page_ranges_options(
+ snapshot=self.snapshot,
offset=offset,
length=length,
previous_snapshot_diff=previous_snapshot_diff,
**kwargs)
@@ -1753,14 +2329,104 @@
process_storage_error(error)
return get_page_ranges_result(ranges)
+ @distributed_trace
+ def list_page_ranges(
+ self,
+ *,
+ offset: Optional[int] = None,
+ length: Optional[int] = None,
+ previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None,
+ **kwargs: Any
+ ) -> AsyncItemPaged[PageRange]:
+ """Returns the list of valid page ranges for a Page Blob or snapshot
+ of a page blob. If `previous_snapshot` is specified, the result will be
+ a diff of changes between the target blob and the previous snapshot.
+
+ :keyword int offset:
+ Start of byte range to use for getting valid page ranges.
+ If no length is given, all bytes after the offset will be searched.
+ Pages must be aligned with 512-byte boundaries, the start offset
+ must be a modulus of 512 and the length must be a modulus of
+ 512.
+ :keyword int length:
+ Number of bytes to use for getting valid page ranges.
+ If length is given, offset must be provided.
+ This range will return valid page ranges from the offset start up to
+ the specified length.
+ Pages must be aligned with 512-byte boundaries, the start offset
+ must be a modulus of 512 and the length must be a modulus of
+ 512.
+ :keyword previous_snapshot:
+ A snapshot value that specifies that the response will contain only pages that were changed
+ between target blob and previous snapshot. Changed pages include both updated and cleared
+ pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot`
+ is the older of the two.
+ :paramtype previous_snapshot: str or Dict[str, Any]
+ :keyword lease:
+ Required if the blob has an active lease. Value can be a BlobLeaseClient object
+ or the lease ID as a string.
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+ :keyword ~datetime.datetime if_modified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only
+ if the resource has been modified since the specified time.
+ :keyword ~datetime.datetime if_unmodified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only if
+ the resource has not been modified since the specified date/time.
+ :keyword str etag:
+ An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+ and act according to the condition specified by the `match_condition` parameter.
+ :keyword ~azure.core.MatchConditions match_condition:
+ The match condition to use upon the etag.
+ :keyword str if_tags_match_condition:
+ Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+ eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+ .. versionadded:: 12.4.0
+
+ :keyword int results_per_page:
+ The maximum number of page ranges to retrieve per API call.
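Since get_page_ranges is now deprecated in favor of the paged API, a sketch of the replacement call (hypothetical names, public SDK surface; AsyncItemPaged is consumed with ``async for`` and fetches pages lazily)::

    from azure.storage.blob.aio import BlobClient

    async def print_page_ranges(blob: BlobClient) -> None:
        async for page_range in blob.list_page_ranges():
            # `cleared` distinguishes cleared ranges from filled ones.
            print(page_range.start, page_range.end, page_range.cleared)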
+ :keyword int timeout:
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
+ :returns: An iterable (auto-paging) of PageRange.
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.PageRange]
+ """
+ results_per_page = kwargs.pop('results_per_page', None)
+ options = _get_page_ranges_options(
+ snapshot=self.snapshot,
+ offset=offset,
+ length=length,
+ previous_snapshot_diff=previous_snapshot,
+ **kwargs)
+
+ if previous_snapshot:
+ command = partial(
+ self._client.page_blob.get_page_ranges_diff,
+ **options)
+ else:
+ command = partial(
+ self._client.page_blob.get_page_ranges,
+ **options)
+ return AsyncItemPaged(
+ command, results_per_page=results_per_page,
+ page_iterator_class=PageRangePaged)
+
@distributed_trace_async
async def get_page_range_diff_for_managed_disk(
- self, previous_snapshot_url, # type: str
- offset=None, # type: Optional[int]
- length=None, # type: Optional[int]
- **kwargs
- ):
- # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
+ self, previous_snapshot_url: str,
+ offset: Optional[int] = None,
+ length: Optional[int] = None,
+ **kwargs: Any
+ ) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]:
"""Returns the list of valid page ranges for a managed disk or snapshot.
.. note::
@@ -1769,7 +2435,7 @@
.. versionadded:: 12.2.0
This operation was introduced in API version '2019-07-07'.
- :param previous_snapshot_url:
+ :param str previous_snapshot_url:
Specifies the URL of a previous snapshot of the managed disk.
The response will only contain pages that were changed between the target blob and
its previous snapshot.
@@ -1809,13 +2475,18 @@
:keyword ~azure.core.MatchConditions match_condition:
The match condition to use upon the etag.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
The first element is the list of filled page ranges, the second element is the list of cleared page ranges.
:rtype: tuple(list(dict(str, str)), list(dict(str, str)))
"""
- options = self._get_page_ranges_options(
+ options = _get_page_ranges_options(
+ snapshot=self.snapshot,
offset=offset,
length=length,
prev_snapshot_url=previous_snapshot_url,
**kwargs)
@@ -1827,12 +2498,11 @@
return get_page_ranges_result(ranges)
@distributed_trace_async
- async def set_sequence_number( # type: ignore
- self, sequence_number_action, # type: Union[str, SequenceNumberAction]
- sequence_number=None, # type: Optional[str]
- **kwargs
- ):
- # type: (...) -> Dict[str, Union[str, datetime]]
+ async def set_sequence_number(
+ self, sequence_number_action: Union[str, "SequenceNumberAction"],
+ sequence_number: Optional[str] = None,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime]]:
"""Sets the blob sequence number.
:param str sequence_number_action:
@@ -1870,20 +2540,22 @@ async def set_sequence_number( # type: ignore
.. versionadded:: 12.4.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag and last modified).
:rtype: dict(str, Any)
"""
- options = self._set_sequence_number_options(
- sequence_number_action, sequence_number=sequence_number, **kwargs)
+ options = _set_sequence_number_options(sequence_number_action, sequence_number=sequence_number, **kwargs)
try:
- return await self._client.page_blob.update_sequence_number(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.page_blob.update_sequence_number(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def resize_blob(self, size, **kwargs):
- # type: (int, Any) -> Dict[str, Union[str, datetime]]
+ async def resize_blob(self, size: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]:
"""Resizes a page blob to the specified size.
If the specified value is less than the current size of the blob,
@@ -1924,24 +2596,29 @@
blob and number of allowed IOPS. This is only applicable to page blobs on
premium storage accounts.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag and last modified).
:rtype: dict(str, Any)
"""
- options = self._resize_blob_options(size, **kwargs)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _resize_blob_options(size=size, **kwargs)
try:
- return await self._client.page_blob.resize(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.page_blob.resize(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def upload_page( # type: ignore
- self, page, # type: bytes
- offset, # type: int
- length, # type: int
- **kwargs
- ):
- # type: (...) -> Dict[str, Union[str, datetime]]
+ async def upload_page(
+ self, page: bytes,
+ offset: int,
+ length: int,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime]]:
"""The Upload Pages operation writes a range of pages to a page blob.
:param bytes page:
@@ -2015,28 +2692,36 @@
:keyword str encoding:
Defaults to UTF-8.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag and last modified).
:rtype: dict(str, Any)
"""
- options = self._upload_page_options(
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _upload_page_options(
page=page,
offset=offset,
length=length,
**kwargs)
try:
- return await self._client.page_blob.upload_pages(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.page_blob.upload_pages(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def upload_pages_from_url(self, source_url, # type: str
- offset, # type: int
- length, # type: int
- source_offset, # type: int
- **kwargs
- ):
- # type: (...) -> Dict[str, Any]
+ async def upload_pages_from_url(
+ self, source_url: str,
+ offset: int,
+ length: int,
+ source_offset: int,
+ **kwargs: Any
+ ) -> Dict[str, Any]:
"""
The Upload Pages operation writes a range of pages to a page blob where
the contents are read from a URL.
@@ -2126,24 +2811,36 @@
.. versionadded:: 12.2.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
+ :keyword str source_authorization:
+ Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
+ the prefix of the source_authorization string.
+ :returns: Response after uploading pages from specified URL.
+ :rtype: Dict[str, Any]
"""
- options = self._upload_pages_from_url_options(
- source_url=self._encode_source_url(source_url),
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _upload_pages_from_url_options(
+ source_url=source_url,
offset=offset,
length=length,
source_offset=source_offset,
**kwargs
)
try:
- return await self._client.page_blob.upload_pages_from_url(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.page_blob.upload_pages_from_url(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def clear_page(self, offset, length, **kwargs):
- # type: (int, int, Any) -> Dict[str, Union[str, datetime]]
+ async def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union[str, datetime]]:
"""Clears a range of pages.
:param int offset:
@@ -2198,27 +2895,39 @@
As the encryption key itself is provided in the request,
a secure connection must be established to transfer the key.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag and last modified).
:rtype: dict(str, Any)
"""
- options = self._clear_page_options(offset, length, **kwargs)
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _clear_page_options(
+ offset=offset,
+ length=length,
+ **kwargs
+ )
try:
- return await self._client.page_blob.clear_pages(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.page_blob.clear_pages(**options))
except HttpResponseError as error:
process_storage_error(error)
@distributed_trace_async
- async def append_block( # type: ignore
- self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]]
- length=None, # type: Optional[int]
- **kwargs
- ):
- # type: (...) -> Dict[str, Union[str, datetime, int]]
+ async def append_block(
+ self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]],
+ length: Optional[int] = None,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime, int]]:
"""Commits a new block of data to the end of the existing append blob.
:param data:
Content of the block.
+ :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]]
:param int length:
Size of the block in bytes.
:keyword bool validate_content:
@@ -2283,26 +2992,35 @@
.. versionadded:: 12.2.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag, last modified, append offset, committed block count).
:rtype: dict(str, Any)
"""
- options = self._append_block_options(
- data,
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _append_block_options(
+ data=data,
length=length,
**kwargs
)
try:
- return await self._client.append_blob.append_block(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.append_blob.append_block(**options))
except HttpResponseError as error:
process_storage_error(error)
- @distributed_trace_async()
- async def append_block_from_url(self, copy_source_url, # type: str
- source_offset=None, # type: Optional[int]
- source_length=None, # type: Optional[int]
- **kwargs):
- # type: (...) -> Dict[str, Union[str, datetime, int]]
+ @distributed_trace_async
+ async def append_block_from_url(
+ self, copy_source_url: str,
+ source_offset: Optional[int] = None,
+ source_length: Optional[int] = None,
+ **kwargs: Any
+ ) -> Dict[str, Union[str, datetime, int]]:
"""
Creates a new block to be committed as part of a blob, where the contents are read from a source url.
@@ -2386,22 +3104,35 @@
.. versionadded:: 12.2.0
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
+ :keyword str source_authorization:
+ Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
+ the prefix of the source_authorization string.
+ :returns: Result after appending a new block.
+ :rtype: Dict[str, Union[str, datetime, int]]
"""
- options = self._append_block_from_url_options(
- copy_source_url=self._encode_source_url(copy_source_url),
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ if kwargs.get('cpk') and self.scheme.lower() != 'https':
+ raise ValueError("Customer provided encryption key must be used over HTTPS.")
+ options = _append_block_from_url_options(
+ copy_source_url=copy_source_url,
source_offset=source_offset,
source_length=source_length,
**kwargs
)
try:
- return await self._client.append_blob.append_block_from_url(**options) # type: ignore
+ return cast(Dict[str, Union[str, datetime, int]],
+ await self._client.append_blob.append_block_from_url(**options))
except HttpResponseError as error:
process_storage_error(error)
- @distributed_trace_async()
- async def seal_append_blob(self, **kwargs):
- # type: (...) -> Dict[str, Union[str, datetime, int]]
+ @distributed_trace_async
+ async def seal_append_blob(self, **kwargs: Any) -> Dict[str, Union[str, datetime, int]]:
"""The Seal operation seals the Append Blob to make it read-only.
.. versionadded:: 12.4.0
@@ -2434,12 +3165,51 @@
:keyword ~azure.core.MatchConditions match_condition:
The match condition to use upon the etag.
:keyword int timeout:
- The timeout parameter is expressed in seconds.
+ Sets the server-side timeout for the operation in seconds. For more details see
+ https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts
+ see `here `__.
:returns: Blob-updated property dict (Etag, last modified, append offset, committed block count).
:rtype: dict(str, Any)
"""
- options = self._seal_append_blob_options(**kwargs)
+ if self.require_encryption or (self.key_encryption_key is not None):
+ raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+ options = _seal_append_blob_options(**kwargs)
try:
- return await self._client.append_blob.seal(**options) # type: ignore
+ return cast(Dict[str, Any], await self._client.append_blob.seal(**options))
except HttpResponseError as error:
process_storage_error(error)
+
+ def _get_container_client(self) -> "ContainerClient":
+ """Get a client to interact with the blob's parent container.
+
+ The container need not already exist. Defaults to current blob's credentials.
+
+ :returns: A ContainerClient.
+ :rtype: ~azure.storage.blob.ContainerClient
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_containers_async.py
+ :start-after: [START get_container_client_from_blob_client]
+ :end-before: [END get_container_client_from_blob_client]
+ :language: python
+ :dedent: 12
+ :caption: Get container client from blob object.
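Pulling the append blob operations above together, a minimal sketch (hypothetical names, public SDK surface)::

    from typing import Iterable
    from azure.storage.blob.aio import BlobClient

    async def append_lines(blob: BlobClient, lines: Iterable[bytes]) -> None:
        await blob.create_append_blob()
        for line in lines:
            # Each call commits a new block at the end of the blob.
            await blob.append_block(line)
        # Optionally make the blob read-only once writing is finished.
        await blob.seal_append_blob()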
+ """ + from ._container_client_async import ContainerClient + if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=cast(Iterable["AsyncHTTPPolicy"], + self._pipeline._impl_policies) # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline + return ContainerClient( + f"{self.scheme}://{self.primary_hostname}", container_name=self.container_name, + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_service_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_service_client_async.py index 4e91743c38be..8f76aa98c8cf 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_service_client_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_blob_service_client_async.py @@ -3,51 +3,66 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method +# pylint: disable=docstring-keyword-should-match-keyword-only + import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, Dict, List, +import warnings +from typing import ( + Any, cast, Dict, Iterable, List, Optional, Union, TYPE_CHECKING ) +from typing_extensions import Self +from azure.core.async_paging import AsyncItemPaged from azure.core.exceptions import HttpResponseError -from azure.core.tracing.decorator import distributed_trace from azure.core.pipeline import AsyncPipeline +from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.async_paging import AsyncItemPaged -from .._shared.models import LocationMode -from .._shared.policies_async import ExponentialRetry -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper -from .._shared.response_handlers import return_response_headers, process_storage_error -from .._shared.parser import _to_utc_datetime -from .._shared.response_handlers import parse_to_internal_user_delegation_key +from ._blob_client_async import BlobClient +from ._container_client_async import ContainerClient +from ._models import ContainerPropertiesPaged, FilteredBlobPaged +from .._blob_service_client_helpers import _parse_url +from .._deserialize import service_properties_deserialize, service_stats_deserialize +from .._encryption import StorageEncryptionMixin from .._generated.aio import AzureBlobStorage from .._generated.models import StorageServiceProperties, KeyInfo -from .._blob_service_client import BlobServiceClient as BlobServiceClientBase -from ._container_client_async import ContainerClient -from 
-from .._models import ContainerProperties
-from .._deserialize import service_stats_deserialize, service_properties_deserialize
+from .._models import BlobProperties, ContainerProperties, CorsRule
from .._serialize import get_api_version
-from ._models import ContainerPropertiesPaged, FilteredBlobPaged
+from .._shared.base_client import parse_query, StorageAccountHostsMixin
+from .._shared.base_client_async import parse_connection_str
+from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper
+from .._shared.response_handlers import (
+ parse_to_internal_user_delegation_key,
+ process_storage_error,
+ return_response_headers,
+)
+from .._shared.models import LocationMode
+from .._shared.parser import _to_utc_datetime
+from .._shared.policies_async import ExponentialRetry
if TYPE_CHECKING:
+ from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
+ from azure.core.credentials_async import AsyncTokenCredential
+ from azure.core.pipeline.policies import AsyncHTTPPolicy
from datetime import datetime
- from .._shared.models import AccountSasPermissions, ResourceTypes, UserDelegationKey
from ._lease_async import BlobLeaseClient
from .._models import (
- BlobProperties,
- PublicAccess,
BlobAnalyticsLogging,
+ FilteredBlob,
Metrics,
- CorsRule,
+ PublicAccess,
RetentionPolicy,
- StaticWebsite,
+ StaticWebsite
)
+ from .._shared.models import UserDelegationKey
-class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase):
+class BlobServiceClient( # type: ignore [misc]
+ AsyncStorageAccountHostsMixin,
+ StorageAccountHostsMixin,
+ StorageEncryptionMixin
+):
"""A client to interact with the Blob Service at the account level.
This client provides operations to retrieve and configure the account properties
@@ -62,13 +77,15 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase):
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be a SAS token string,
- an instance of a AzureSasCredential from azure.core.credentials, an account
- shared access key, or an instance of a TokenCredentials class from azure.identity.
+ an instance of an AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
+ an account shared access key, or an instance of a TokenCredentials class from azure.identity.
If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
- except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
+ If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
+ should be the storage account key.
:keyword str api_version:
- The Storage API version to use for requests. Default value is '2019-07-07'.
- Setting to an older version may result in reduced feature compatibility.
+ The Storage API version to use for requests. Default value is the most recent service version that is
+ compatible with the current SDK. Setting to an older version may result in reduced feature compatibility.
.. versionadded:: 12.2.0
@@ -87,6 +104,9 @@
the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB.
:keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
or 4MB.
+ :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -106,27 +126,80 @@ class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase): """ def __init__( - self, account_url, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> None + self, account_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - super(BlobServiceClient, self).__init__( - account_url, - credential=credential, - **kwargs) - self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - self._loop = kwargs.get('loop', None) + parsed_url, sas_token = _parse_url(account_url=account_url) + _, sas_token = parse_query(parsed_url.query) + self._query_str, credential = self._format_query_string(sas_token, credential) + super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._configure_encryption(kwargs) + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + + :param str hostname: + The hostname of the current location mode. + :returns: A formatted endpoint URL including current location mode hostname. + :rtype: str + """ + return f"{self.scheme}://{hostname}/{self._query_str}" + + @classmethod + def from_connection_string( + cls, conn_str: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create BlobServiceClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials_async.AsyncTokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. 
The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :returns: A Blob service client. + :rtype: ~azure.storage.blob.BlobServiceClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string] + :end-before: [END auth_from_connection_string] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient from a connection string. + """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls(account_url, credential=credential, **kwargs) @distributed_trace_async - async def get_user_delegation_key(self, key_start_time, # type: datetime - key_expiry_time, # type: datetime - **kwargs # type: Any - ): - # type: (...) -> UserDelegationKey + async def get_user_delegation_key( + self, key_start_time: "datetime", + key_expiry_time: "datetime", + **kwargs: Any + ) -> "UserDelegationKey": """ Obtain a user delegation key for the purpose of signing SAS tokens. A token credential must be present on the service object for this request to succeed. @@ -136,7 +209,11 @@ async def get_user_delegation_key(self, key_start_time, # type: datetime :param ~datetime.datetime key_expiry_time: A DateTime value. Indicates when the key stops being valid. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: The user delegation key. :rtype: ~azure.storage.blob.UserDelegationKey """ @@ -152,8 +229,7 @@ async def get_user_delegation_key(self, key_start_time, # type: datetime return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore @distributed_trace_async - async def get_account_information(self, **kwargs): - # type: (Any) -> Dict[str, str] + async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account. The information can also be retrieved if the user has a SAS to a container or blob. @@ -177,8 +253,7 @@ async def get_account_information(self, **kwargs): process_storage_error(error) @distributed_trace_async - async def get_service_stats(self, **kwargs): - # type: (Any) -> Dict[str, Any] + async def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: """Retrieves statistics related to replication for the Blob service. It is only available when read-access geo-redundant replication is enabled for @@ -198,7 +273,11 @@ async def get_service_stats(self, **kwargs): replication is enabled for your storage account. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: The blob service stats.
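As the get_user_delegation_key docstring above notes, a token credential must be present on the client; a shared key or SAS will not work for this call. A minimal sketch, assuming azure-identity is installed and the account URL placeholder is filled in:

    import asyncio
    from datetime import datetime, timedelta, timezone

    from azure.identity.aio import DefaultAzureCredential
    from azure.storage.blob.aio import BlobServiceClient

    async def main():
        credential = DefaultAzureCredential()
        async with BlobServiceClient(
                "https://<account>.blob.core.windows.net", credential=credential) as service:
            now = datetime.now(timezone.utc)
            # The key is valid only inside this window; SAS tokens signed with
            # it must also expire within it.
            key = await service.get_user_delegation_key(
                key_start_time=now, key_expiry_time=now + timedelta(hours=1))
        await credential.close()

    asyncio.run(main())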
:rtype: Dict[str, Any] @@ -220,13 +299,16 @@ async def get_service_stats(self, **kwargs): process_storage_error(error) @distributed_trace_async - async def get_service_properties(self, **kwargs): - # type: (Any) -> Dict[str, Any] + async def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: """Gets the properties of a storage account's Blob service, including Azure Storage Analytics. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: An object containing blob service properties such as analytics logging, hour/minute metrics, cors rules, etc. :rtype: Dict[str, Any] @@ -249,16 +331,15 @@ async def get_service_properties(self, **kwargs): @distributed_trace_async async def set_service_properties( - self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging] - hour_metrics=None, # type: Optional[Metrics] - minute_metrics=None, # type: Optional[Metrics] - cors=None, # type: Optional[List[CorsRule]] - target_version=None, # type: Optional[str] - delete_retention_policy=None, # type: Optional[RetentionPolicy] - static_website=None, # type: Optional[StaticWebsite] - **kwargs - ): - # type: (...) -> None + self, analytics_logging: Optional["BlobAnalyticsLogging"] = None, + hour_metrics: Optional["Metrics"] = None, + minute_metrics: Optional["Metrics"] = None, + cors: Optional[List[CorsRule]] = None, + target_version: Optional[str] = None, + delete_retention_policy: Optional["RetentionPolicy"] = None, + static_website: Optional["StaticWebsite"] = None, + **kwargs: Any + ) -> None: """Sets the properties of a storage account's Blob service, including Azure Storage Analytics. @@ -293,7 +374,11 @@ async def set_service_properties( and if yes, indicates the index document and 404 error document to use. :type static_website: ~azure.storage.blob.StaticWebsite :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -314,7 +399,7 @@ async def set_service_properties( logging=analytics_logging, hour_metrics=hour_metrics, minute_metrics=minute_metrics, - cors=cors, + cors=CorsRule._to_generated(cors), # pylint: disable=protected-access default_service_version=target_version, delete_retention_policy=delete_retention_policy, static_website=static_website @@ -327,11 +412,10 @@ async def set_service_properties( @distributed_trace def list_containers( - self, name_starts_with=None, # type: Optional[str] - include_metadata=False, # type: Optional[bool] - **kwargs - ): - # type: (...) -> AsyncItemPaged[ContainerProperties] + self, name_starts_with: Optional[str] = None, + include_metadata: bool = False, + **kwargs: Any + ) -> AsyncItemPaged[ContainerProperties]: """Returns a generator to list the containers under the specified account. The generator will lazily follow the continuation tokens returned by @@ -347,11 +431,18 @@ def list_containers( Specifies that deleted containers are to be returned in the response.
This is for accounts with container restore enabled. The default value is `False`. .. versionadded:: 12.4.0 + :keyword bool include_system: + Flag specifying that system containers should be included. + .. versionadded:: 12.10.0 :keyword int results_per_page: The maximum number of container names to retrieve per API call. If the request does not specify the server will return up to 5,000 items. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: An iterable (auto-paging) of ContainerProperties. :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.ContainerProperties] @@ -368,6 +459,9 @@ def list_containers( include_deleted = kwargs.pop('include_deleted', None) if include_deleted: include.append("deleted") + include_system = kwargs.pop('include_system', None) + if include_system: + include.append("system") timeout = kwargs.pop('timeout', None) results_per_page = kwargs.pop('results_per_page', None) command = functools.partial( @@ -384,8 +478,7 @@ def list_containers( ) @distributed_trace - def find_blobs_by_tags(self, filter_expression, **kwargs): - # type: (str, **Any) -> AsyncItemPaged[FilteredBlob] + def find_blobs_by_tags(self, filter_expression: str, **kwargs: Any) -> AsyncItemPaged["FilteredBlob"]: """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be @@ -398,7 +491,11 @@ def find_blobs_by_tags(self, filter_expression, **kwargs): :keyword int results_per_page: The max result per page when paginating. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.FilteredBlob] """ @@ -416,12 +513,11 @@ def find_blobs_by_tags(self, filter_expression, **kwargs): @distributed_trace_async async def create_container( - self, name, # type: str - metadata=None, # type: Optional[Dict[str, str]] - public_access=None, # type: Optional[Union[PublicAccess, str]] - **kwargs - ): - # type: (...) -> ContainerClient + self, name: str, + metadata: Optional[Dict[str, str]] = None, + public_access: Optional[Union["PublicAccess", str]] = None, + **kwargs: Any + ) -> ContainerClient: """Creates a new container under the specified account. If the container with the same name already exists, a ResourceExistsError will @@ -444,7 +540,12 @@ async def create_container( :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client.
To configure client-side network timeouts + see `here `__. + :returns: A container client to interact with the newly created container. :rtype: ~azure.storage.blob.aio.ContainerClient .. admonition:: Example: @@ -465,11 +566,10 @@ async def create_container( @distributed_trace_async async def delete_container( - self, container, # type: Union[ContainerProperties, str] - lease=None, # type: Optional[Union[BlobLeaseClient, str]] - **kwargs - ): - # type: (...) -> None + self, container: Union[ContainerProperties, str], + lease: Optional[Union["BlobLeaseClient", str]] = None, + **kwargs: Any + ) -> None: """Marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. @@ -483,7 +583,7 @@ async def delete_container( If specified, delete_container only succeeds if the container's lease is active and matches this ID. Required if the container has an active lease. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :type lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -502,7 +602,11 @@ async def delete_container( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -514,17 +618,55 @@ async def delete_container( :dedent: 16 :caption: Deleting a container in the blob service. """ - container = self.get_container_client(container) # type: ignore + container_client = self.get_container_client(container) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) - await container.delete_container( # type: ignore + await container_client.delete_container( lease=lease, timeout=timeout, **kwargs) @distributed_trace_async - async def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs): - # type: (str, str, str, **Any) -> ContainerClient + async def _rename_container(self, name: str, new_name: str, **kwargs: Any) -> ContainerClient: + """Renames a container. + + Operation is successful only if the source container exists. + + :param str name: + The name of the container to rename. + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: A container client for the renamed container.
+ :rtype: ~azure.storage.blob.ContainerClient + """ + renamed_container = self.get_container_client(new_name) + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id + except AttributeError: + kwargs['source_lease_id'] = lease + try: + await renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def undelete_container( + self, deleted_container_name: str, + deleted_container_version: str, + **kwargs: Any + ) -> ContainerClient: """Restores soft-deleted container. Operation will only be successful if used within the specified number of days @@ -537,14 +679,18 @@ async def undelete_container(self, deleted_container_name, deleted_container_ver Specifies the name of the deleted container to restore. :param str deleted_container_version: Specifies the version of the deleted container to restore. - :keyword str new_name: - The new name for the deleted container to be restored to. - If not specified deleted_container_name will be used as the restored container name. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: The recovered soft-deleted ContainerClient. :rtype: ~azure.storage.blob.aio.ContainerClient """ new_name = kwargs.pop('new_name', None) + if new_name: + warnings.warn("`new_name` is no longer supported.", DeprecationWarning) container = self.get_container_client(new_name or deleted_container_name) try: await container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access @@ -554,8 +700,7 @@ async def undelete_container(self, deleted_container_name, deleted_container_ver except HttpResponseError as error: process_storage_error(error) - def get_container_client(self, container): - # type: (Union[ContainerProperties, str]) -> ContainerClient + def get_container_client(self, container: Union[ContainerProperties, str]) -> ContainerClient: """Get a client to interact with the specified container. The container need not already exist. @@ -576,27 +721,28 @@ def get_container_client(self, container): :dedent: 12 :caption: Getting the container client to interact with a specific container.
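A short sketch of the restore flow built from list_containers and undelete_container above, assuming container soft delete is enabled on the account; the `deleted` and `version` attributes of ContainerProperties are only populated when listing with include_deleted=True:

    async def restore_deleted_containers(service):
        async for props in service.list_containers(include_deleted=True):
            if props.deleted:
                restored = await service.undelete_container(props.name, props.version)
                print("restored:", restored.container_name)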
""" - try: + if isinstance(container, ContainerProperties): container_name = container.name - except AttributeError: + else: container_name = container _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access ) return ContainerClient( self.url, container_name=container_name, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function, loop=self._loop) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) def get_blob_client( - self, container, # type: Union[ContainerProperties, str] - blob, # type: Union[BlobProperties, str] - snapshot=None # type: Optional[Union[Dict[str, Any], str]] - ): - # type: (...) -> BlobClient + self, container: Union[ContainerProperties, str], + blob: str, + snapshot: Optional[Union[Dict[str, Any], str]] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. @@ -605,15 +751,15 @@ def get_blob_client( The container that the blob is in. This can either be the name of the container, or an instance of ContainerProperties. :type container: str or ~azure.storage.blob.ContainerProperties - :param blob: - The blob with which to interact. This can either be the name of the blob, - or an instance of BlobProperties. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: + The blob with which to interact. :param snapshot: The optional blob snapshot on which to operate. This can either be the ID of the snapshot, or a dictionary output returned by :func:`~azure.storage.blob.aio.BlobClient.create_snapshot()`. :type snapshot: str or dict(str, Any) + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. :returns: A BlobClient. :rtype: ~azure.storage.blob.aio.BlobClient @@ -626,22 +772,28 @@ def get_blob_client( :dedent: 16 :caption: Getting the blob client to interact with a specific blob. """ - try: - container_name = container.name - except AttributeError: - container_name = container - - try: + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. 
" + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_name = blob.name - except AttributeError: + else: blob_name = blob + if isinstance(container, ContainerProperties): + container_name = container.name + else: + container_name = container _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + policies=cast(Iterable["AsyncHTTPPolicy"], + self._pipeline._impl_policies) # pylint: disable = protected-access ) - return BlobClient( # type: ignore + return BlobClient( self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function, loop=self._loop) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_container_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_container_client_async.py index 7ad617d77ed5..306e3acf5519 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_container_client_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_container_client_async.py @@ -1,51 +1,68 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method +# pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only + import functools -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, Iterable, AnyStr, Dict, List, IO, AsyncIterator, +import warnings +from datetime import datetime +from typing import ( + Any, AnyStr, AsyncIterable, AsyncIterator, cast, Dict, List, IO, Iterable, Optional, overload, Union, TYPE_CHECKING ) +from urllib.parse import unquote, urlparse +from typing_extensions import Self -from azure.core.exceptions import HttpResponseError +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError +from azure.core.pipeline import AsyncPipeline +from azure.core.pipeline.transport import AsyncHttpResponse # pylint: disable=C4756 from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.async_paging import AsyncItemPaged -from azure.core.pipeline import AsyncPipeline -from azure.core.pipeline.transport import AsyncHttpResponse -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper +from ._blob_client_async import BlobClient +from ._download_async import StorageStreamDownloader +from ._lease_async import BlobLeaseClient +from ._list_blobs_helper import BlobNamesPaged, BlobPropertiesPaged, BlobPrefix +from ._models import FilteredBlobPaged +from .._container_client_helpers import ( + _format_url, + _generate_delete_blobs_options, + _generate_set_tiers_options, + _parse_url +) +from .._deserialize import deserialize_container_properties +from .._encryption import StorageEncryptionMixin +from .._generated.aio import AzureBlobStorage +from .._generated.models import SignedIdentifier +from .._list_blobs_helper import IgnoreListBlobsDeserializer +from .._models import ContainerProperties, BlobType, BlobProperties, FilteredBlob +from .._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions +from .._shared.base_client import StorageAccountHostsMixin +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str from .._shared.policies_async import ExponentialRetry from .._shared.request_handlers import add_metadata_headers, serialize_iso from .._shared.response_handlers import ( process_storage_error, - return_response_headers, - return_headers_and_deserialized) -from .._generated.aio import AzureBlobStorage -from .._generated.models import SignedIdentifier -from .._deserialize import deserialize_container_properties -from .._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions -from .._container_client import ContainerClient as ContainerClientBase, _get_blob_name -from .._models import ContainerProperties, BlobType, BlobProperties # pylint: disable=unused-import -from ._list_blobs_helper import BlobPropertiesPaged, BlobPrefix -from ._lease_async import BlobLeaseClient -from ._blob_client_async import BlobClient + return_headers_and_deserialized, + return_response_headers +) if TYPE_CHECKING: - from .._models import PublicAccess - from ._download_async import StorageStreamDownloader - from datetime import datetime - from .._models import ( # pylint: disable=unused-import + from azure.core.credentials import 
AzureNamedKeyCredential, AzureSasCredential + from azure.core.credentials_async import AsyncTokenCredential + from ._blob_service_client_async import BlobServiceClient + from .._models import ( AccessPolicy, StandardBlobTier, - PremiumPageBlobTier) + PremiumPageBlobTier, + PublicAccess + ) -class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): +class ContainerClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin, StorageEncryptionMixin): # type: ignore [misc] # pylint: disable=too-many-public-methods """A client to interact with a specific container, although that container may not yet exist. @@ -61,13 +78,15 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): :param credential: The credentials with which to authenticate. This is optional if the account URL already has a SAS token. The value can be a SAS token string, - an instance of a AzureSasCredential from azure.core.credentials, an account - shared access key, or an instance of a TokenCredentials class from azure.identity. + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - Setting to an older version may result in reduced feature compatibility. + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. .. versionadded:: 12.2.0 @@ -86,6 +105,9 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, or 4MB. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. .. admonition:: Example: @@ -104,26 +126,143 @@ class ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase): :caption: Creating the container client directly. """ def __init__( - self, account_url, # type: str - container_name, # type: str - credential=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) 
-> None + self, account_url: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) - super(ContainerClient, self).__init__( - account_url, - container_name=container_name, - credential=credential, - **kwargs) - self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline) - default_api_version = self._client._config.version # pylint: disable=protected-access - self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access - self._loop = kwargs.get('loop', None) + parsed_url, sas_token = _parse_url(account_url=account_url, container_name=container_name) + + self.container_name = container_name + # This parameter is used for the hierarchy traversal. Give precedence to credential. + self._raw_credential = credential if credential else sas_token + self._query_str, credential = self._format_query_string(sas_token, credential) + super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._api_version = get_api_version(kwargs) + self._client = self._build_generated_client() + self._configure_encryption(kwargs) + + def _build_generated_client(self) -> AzureBlobStorage: + client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access + return client + + def _format_url(self, hostname): + return _format_url( + container_name=self.container_name, + hostname=hostname, + scheme=self.scheme, + query_str=self._query_str + ) + + @classmethod + def from_container_url( + cls, container_url: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create ContainerClient from a container url. + + :param str container_url: + The full endpoint URL to the Container, including SAS token if used. This could be + either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. + :type container_url: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials_async.AsyncTokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. 
The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :returns: A container client. + :rtype: ~azure.storage.blob.ContainerClient + """ + try: + if not container_url.lower().startswith('http'): + container_url = "https://" + container_url + except AttributeError as exc: + raise ValueError("Container URL must be a string.") from exc + parsed_url = urlparse(container_url) + if not parsed_url.netloc: + raise ValueError(f"Invalid URL: {container_url}") + + container_path = parsed_url.path.strip('/').split('/') + account_path = "" + if len(container_path) > 1: + account_path = "/" + "/".join(container_path[:-1]) + account_url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}" + container_name = unquote(container_path[-1]) + if not container_name: + raise ValueError("Invalid URL. Please provide a URL with a valid container name") + return cls(account_url, container_name=container_name, credential=credential, **kwargs) + + @classmethod + def from_connection_string( + cls, conn_str: str, + container_name: str, + credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + **kwargs: Any + ) -> Self: + """Create ContainerClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param container_name: + The container name for the blob. + :type container_name: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" + should be the storage account key. + :type credential: + ~azure.core.credentials.AzureNamedKeyCredential or + ~azure.core.credentials.AzureSasCredential or + ~azure.core.credentials_async.AsyncTokenCredential or + str or dict[str, str] or None + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://.blob.core.windows.net. + :returns: A container client. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string_container] + :end-before: [END auth_from_connection_string_container] + :language: python + :dedent: 8 + :caption: Creating the ContainerClient from a connection string. 
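The two ContainerClient constructors above in use, with placeholder values; a SAS token embedded in the container URL doubles as the credential, while a credential passed alongside a connection string takes precedence over the shared key inside it:

    from azure.storage.blob.aio import ContainerClient

    # From a full container URL; the SAS in the query string authenticates.
    container = ContainerClient.from_container_url(
        "https://<account>.blob.core.windows.net/<container>?<sas-token>")

    # From a connection string.
    container = ContainerClient.from_connection_string(
        "<connection-string>", container_name="<container>")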
+ """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls( + account_url, container_name=container_name, credential=credential, **kwargs) @distributed_trace_async - async def create_container(self, metadata=None, public_access=None, **kwargs): - # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> None + async def create_container( + self, metadata: Optional[Dict[str, str]] = None, + public_access: Optional[Union["PublicAccess", str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """ Creates a new container under the specified account. If the container with the same name already exists, the operation fails. @@ -142,8 +281,13 @@ async def create_container(self, metadata=None, public_access=None, **kwargs): :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope :keyword int timeout: - The timeout parameter is expressed in seconds. - :rtype: None + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: A dictionary of response headers. + :rtype: Dict[str, Union[str, datetime]] .. admonition:: Example: @@ -170,9 +314,45 @@ async def create_container(self, metadata=None, public_access=None, **kwargs): process_storage_error(error) @distributed_trace_async - async def delete_container( - self, **kwargs): - # type: (Any) -> None + async def _rename_container(self, new_name: str, **kwargs: Any) -> "ContainerClient": + """Renames a container. + + Operation is successful only if the source container exists. + + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. + :returns: The renamed container. + :rtype: ~azure.storage.blob.ContainerClient + """ + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container = ContainerClient( + f"{self.scheme}://{self.primary_hostname}", container_name=new_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) + await renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def delete_container(self, **kwargs: Any) -> None: """ Marks the specified container for deletion. 
The container and any blobs contained within it are later deleted during garbage collection. @@ -200,7 +380,11 @@ async def delete_container( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :rtype: None .. admonition:: Example: @@ -227,10 +411,10 @@ async def delete_container( @distributed_trace_async async def acquire_lease( - self, lease_duration=-1, # type: int - lease_id=None, # type: Optional[str] - **kwargs): - # type: (...) -> BlobLeaseClient + self, lease_duration: int = -1, + lease_id: Optional[str] = None, + **kwargs: Any + ) -> BlobLeaseClient: """ Requests a new lease. If the container does not have an active lease, the Blob service creates a lease on the container and returns a new @@ -262,7 +446,11 @@ async def acquire_lease( :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: A BlobLeaseClient object, that can be run in a context manager. :rtype: ~azure.storage.blob.aio.BlobLeaseClient @@ -282,8 +470,7 @@ async def acquire_lease( return lease @distributed_trace_async - async def get_account_information(self, **kwargs): - # type: (**Any) -> Dict[str, str] + async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: """Gets information related to the storage account. The information can also be retrieved if the user has a SAS to a container or blob. @@ -298,8 +485,7 @@ async def get_account_information(self, **kwargs): process_storage_error(error) @distributed_trace_async - async def get_container_properties(self, **kwargs): - # type: (**Any) -> ContainerProperties + async def get_container_properties(self, **kwargs: Any) -> ContainerProperties: """Returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. @@ -308,7 +494,11 @@ async def get_container_properties(self, **kwargs): container's lease is active and matches this ID. :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: Properties for the specified container within a container object. :rtype: ~azure.storage.blob.ContainerProperties @@ -336,11 +526,33 @@ async def get_container_properties(self, **kwargs): return response # type: ignore @distributed_trace_async - async def set_container_metadata( # type: ignore - self, metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...)
-> Dict[str, Union[str, datetime]] + async def exists(self, **kwargs: Any) -> bool: + """ + Returns True if a container exists and returns False otherwise. + + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: boolean + :rtype: bool + """ + try: + await self._client.container.get_properties(**kwargs) + return True + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceNotFoundError: + return False + + @distributed_trace_async + async def set_container_metadata( + self, metadata: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets one or more user-defined name-value pairs for the specified container. Each call to this operation replaces all existing metadata attached to the container. To remove all metadata from the container, @@ -361,8 +573,13 @@ async def set_container_metadata( # type: ignore Specify this header to perform the operation only if the resource has been modified since the specified time. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: Container-updated property dict (Etag and last modified). + :rtype: Dict[str, Union[str, datetime]] .. admonition:: Example: @@ -380,7 +597,7 @@ async def set_container_metadata( # type: ignore mod_conditions = get_modify_conditions(kwargs) timeout = kwargs.pop('timeout', None) try: - return await self._client.container.set_metadata( # type: ignore + return await self._client.container.set_metadata( # type: ignore timeout=timeout, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, @@ -390,9 +607,42 @@ async def set_container_metadata( # type: ignore except HttpResponseError as error: process_storage_error(error) + @distributed_trace + def _get_blob_service_client(self) -> "BlobServiceClient": + """Get a client to interact with the container's parent service account. + + Defaults to current container's credentials. + + :returns: A BlobServiceClient. + :rtype: ~azure.storage.blob.BlobServiceClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service_async.py + :start-after: [START get_blob_service_client_from_container_client] + :end-before: [END get_blob_service_client_from_container_client] + :language: python + :dedent: 8 + :caption: Get blob service client from container object.
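A create-if-missing pattern built on the new exists() helper above, which turns the service's 404 into False instead of raising; the container name and metadata here are illustrative:

    async def ensure_container(conn_str: str, name: str) -> None:
        async with ContainerClient.from_connection_string(conn_str, name) as container:
            if not await container.exists():
                await container.create_container()
            # set_container_metadata replaces all metadata on the container.
            await container.set_container_metadata({"owner": "checkpoint-store"})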
+ """ + from ._blob_service_client_async import BlobServiceClient + if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies #type: ignore [arg-type] # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline + return BlobServiceClient( + f"{self.scheme}://{self.primary_hostname}", + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, + encryption_version=self.encryption_version, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function, _pipeline=_pipeline) + + @distributed_trace_async - async def get_container_access_policy(self, **kwargs): - # type: (Any) -> Dict[str, Any] + async def get_container_access_policy(self, **kwargs: Any) -> Dict[str, Any]: """Gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. @@ -401,7 +651,11 @@ async def get_container_access_policy(self, **kwargs): container's lease is active and matches this ID. :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Access policy information in a dict. :rtype: dict[str, Any] @@ -432,10 +686,10 @@ async def get_container_access_policy(self, **kwargs): @distributed_trace_async async def set_container_access_policy( - self, signed_identifiers, # type: Dict[str, AccessPolicy] - public_access=None, # type: Optional[Union[str, PublicAccess]] - **kwargs # type: Any - ): # type: (...) -> Dict[str, Union[str, datetime]] + self, signed_identifiers: Dict[str, "AccessPolicy"], + public_access: Optional[Union[str, "PublicAccess"]] = None, + **kwargs: Any + ) -> Dict[str, Union[str, datetime]]: """Sets the permissions for the specified container or stored access policies that may be used with Shared Access Signatures. The permissions indicate whether blobs in a container may be accessed publicly. @@ -464,7 +718,11 @@ async def set_container_access_policy( Specify this header to perform the operation only if the resource has not been modified since the specified date/time. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :returns: Container-updated property dict (Etag and last modified). 
:rtype: dict[str, str or ~datetime.datetime] @@ -494,20 +752,23 @@ async def set_container_access_policy( mod_conditions = get_modify_conditions(kwargs) access_conditions = get_access_conditions(lease) try: - return await self._client.container.set_access_policy( + return cast(Dict[str, Union[str, datetime]], await self._client.container.set_access_policy( container_acl=signed_identifiers or None, timeout=timeout, access=public_access, lease_access_conditions=access_conditions, modified_access_conditions=mod_conditions, cls=return_response_headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def list_blobs(self, name_starts_with=None, include=None, **kwargs): - # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> AsyncItemPaged[BlobProperties] + def list_blobs( + self, name_starts_with: Optional[str] = None, + include: Optional[Union[str, List[str]]] = None, + **kwargs: Any + ) -> AsyncItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. @@ -515,11 +776,17 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :param str name_starts_with: Filters the results to return only blobs whose names begin with the specified prefix. - :param list[str] or str include: + :param include: Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'tags'. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :type include: list[str] or str :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties. :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties] @@ -532,6 +799,10 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): :dedent: 12 :caption: List the blobs in the container. """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + if include and not isinstance(include, list): include = [include] @@ -546,17 +817,63 @@ def list_blobs(self, name_starts_with=None, include=None, **kwargs): command, prefix=name_starts_with, results_per_page=results_per_page, + container=self.container_name, page_iterator_class=BlobPropertiesPaged ) + @distributed_trace + def list_blob_names(self, **kwargs: Any) -> AsyncItemPaged[str]: + """Returns a generator to list the names of blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. + + Note that no additional properties or metadata will be returned when using this API. + Additionally this API does not have an option to include additional blobs such as snapshots, + versions, soft-deleted blobs, etc. To get any of this data, use :func:`list_blobs()`.
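A sketch contrasting the two listing calls, per the descriptions above and the keywords that follow: list_blobs yields full BlobProperties (optionally with extra datasets), while list_blob_names skips property deserialization and is the cheaper choice when only names are needed. The prefixes are illustrative:

    async def show_listings(container) -> None:
        async for blob in container.list_blobs(
                name_starts_with="ownership/", include=["metadata", "snapshots"]):
            print(blob.name, blob.last_modified, blob.metadata)

        async for name in container.list_blob_names(name_starts_with="checkpoint/"):
            print(name)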
+ + :keyword str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: An iterable (auto-paging) response of blob names as strings. + :rtype: ~azure.core.async_paging.AsyncItemPaged[str] + """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + + name_starts_with = kwargs.pop('name_starts_with', None) + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + + # For listing only names we need to create a one-off generated client and + # override its deserializer to prevent deserialization of the full response. + client = self._build_generated_client() + client.container._deserialize = IgnoreListBlobsDeserializer() # pylint: disable=protected-access + + command = functools.partial( + client.container.list_blob_flat_segment, + timeout=timeout, + **kwargs) + return AsyncItemPaged( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + container=self.container_name, + page_iterator_class=BlobNamesPaged) + @distributed_trace def walk_blobs( - self, name_starts_with=None, # type: Optional[str] - include=None, # type: Optional[Any] - delimiter="/", # type: str - **kwargs # type: Optional[Any] - ): - # type: (...) -> AsyncItemPaged[BlobProperties] + self, name_starts_with: Optional[str] = None, + include: Optional[Union[List[str], str]] = None, + delimiter: str = "/", + **kwargs: Any + ) -> AsyncItemPaged[BlobProperties]: """Returns a generator to list the blobs under the specified container. The generator will lazily follow the continuation tokens returned by the service. This operation will list blobs in accordance with a hierarchy, @@ -565,19 +882,29 @@ def walk_blobs( :param str name_starts_with: Filters the results to return only blobs whose names begin with the specified prefix. - :param list[str] include: + :param include: Specifies one or more additional datasets to include in the response. - Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :type include: list[str] or str :param str delimiter: When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose names begin with the same substring up to the appearance of the delimiter character. The delimiter may be a single character or a string. :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :returns: An iterable (auto-paging) response of BlobProperties.
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties] """ + if kwargs.pop('prefix', None): + raise ValueError("Passing 'prefix' has no effect on filtering, " + + "please use the 'name_starts_with' parameter instead.") + if include and not isinstance(include, list): include = [include] @@ -593,24 +920,59 @@ def walk_blobs( command, prefix=name_starts_with, results_per_page=results_per_page, + container=self.container_name, delimiter=delimiter) + @distributed_trace + def find_blobs_by_tags( + self, filter_expression: str, + **kwargs: Any + ) -> AsyncItemPaged[FilteredBlob]: + """Returns a generator to list the blobs under the specified container whose tags + match the given search expression. + The generator will lazily follow the continuation tokens returned by + the service. + + :param str filter_expression: + The expression to find blobs whose tags match the specified condition. + e.g. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" + :keyword int results_per_page: + The max result per page when paginating. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: An iterable (auto-paging) response of FilteredBlob. + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.FilteredBlob] + """ + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.filter_blobs, + timeout=timeout, + where=filter_expression, + **kwargs) + return AsyncItemPaged( + command, results_per_page=results_per_page, + container=self.container_name, + page_iterator_class=FilteredBlobPaged) + @distributed_trace_async async def upload_blob( - self, name, # type: Union[str, BlobProperties] - data, # type: Union[Iterable[AnyStr], IO[AnyStr]] - blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] - length=None, # type: Optional[int] - metadata=None, # type: Optional[Dict[str, str]] - **kwargs - ): - # type: (...) -> BlobClient + self, name: str, + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]], + blob_type: Union[str, BlobType] = BlobType.BLOCKBLOB, + length: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + **kwargs + ) -> BlobClient: """Creates a new blob from a data source with automatic chunking. - :param name: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type name: str or ~azure.storage.blob.BlobProperties + :param str name: The blob with which to interact. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -665,7 +1027,12 @@ async def upload_blob( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client.
To configure client-side network timeouts + see `here `__. This method may make multiple calls to the service and + the timeout will apply to each call individually. :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: @@ -699,6 +1066,11 @@ async def upload_blob( :keyword str encoding: Defaults to UTF-8. + :keyword progress_hook: + An async callback to track the progress of a long running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] :returns: A BlobClient to interact with the newly uploaded blob. :rtype: ~azure.storage.blob.aio.BlobClient @@ -711,6 +1083,12 @@ async def upload_blob( :dedent: 12 :caption: Upload blob to the container. """ + if isinstance(name, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param name is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob = self.get_blob_client(name) kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -728,11 +1106,10 @@ async def upload_blob( @distributed_trace_async async def delete_blob( - self, blob, # type: Union[str, BlobProperties] - delete_snapshots=None, # type: Optional[str] - **kwargs - ): - # type: (...) -> None + self, blob: str, + delete_snapshots: Optional[str] = None, + **kwargs: Any + ) -> None: """Marks the specified blob or snapshot for deletion. The blob is later deleted during garbage collection. @@ -743,12 +1120,10 @@ async def delete_blob( If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot and retains the blob or snapshot for specified number of days. After specified number of days, blob's data is removed from the service during garbage collection. - Soft deleted blob or snapshot is accessible through :func:`list_blobs()` specifying `include=["deleted"]` - option. Soft-deleted blob or snapshot can be restored using :func:`~BlobClient.undelete()` + Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` + option. Soft-deleted blob or snapshot can be restored using :func:`~azure.storage.blob.aio.BlobClient.undelete()` - :param blob: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The blob with which to interact. :param str delete_snapshots: Required if the blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to delete. .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. :keyword lease: @@ -788,9 +1164,19 @@ async def delete_blob( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:rtype: None """ + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) timeout = kwargs.pop('timeout', None) @@ -799,22 +1185,56 @@ timeout=timeout, **kwargs) + @overload + async def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: str, + **kwargs: Any + ) -> StorageStreamDownloader[str]: + ... + + @overload + async def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: None = None, + **kwargs: Any + ) -> StorageStreamDownloader[bytes]: + ... + @distributed_trace_async - async def download_blob(self, blob, offset=None, length=None, **kwargs): - # type: (Union[str, BlobProperties], Optional[int], Optional[int], Any) -> StorageStreamDownloader + async def download_blob( + self, blob: str, + offset: Optional[int] = None, + length: Optional[int] = None, + *, + encoding: Union[str, None] = None, + **kwargs: Any + ) -> Union[StorageStreamDownloader[str], StorageStreamDownloader[bytes]]: """Downloads a blob to the StorageStreamDownloader. The readall() method must be used to read all the content or readinto() must be used to download the blob into - a stream. + a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks. - :param blob: The blob with which to interact. If specified, this value will override - a blob value specified in the blob URL. - :type blob: str or ~azure.storage.blob.BlobProperties + :param str blob: The blob with which to interact. :param int offset: Start of byte range to use for downloading a section of the blob. Must be set if length is provided. :param int length: Number of bytes to read from the stream. This is optional, but should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + + This keyword argument was introduced in API version '2019-12-12'. + :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage service checks the hash of the content that has arrived with the hash @@ -861,24 +1281,41 @@ async def download_blob(self, blob, offset=None, length=None, **kwargs): The number of parallel connections with which to download. :keyword str encoding: Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + An async callback to track the progress of a long running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], Awaitable[None]] :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
This method may make multiple calls to the service and + the timeout will apply to each call individually. :returns: A streaming object. (StorageStreamDownloader) :rtype: ~azure.storage.blob.aio.StorageStreamDownloader """ + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) blob_client = self.get_blob_client(blob) # type: ignore kwargs.setdefault('merge_span', True) return await blob_client.download_blob( offset=offset, length=length, + encoding=encoding, **kwargs) @distributed_trace_async - async def delete_blobs( # pylint: disable=arguments-differ - self, *blobs: List[Union[str, BlobProperties, dict]], - **kwargs + async def delete_blobs( + self, *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any ) -> AsyncIterator[AsyncHttpResponse]: """Marks the specified blobs or snapshots for deletion. @@ -890,7 +1327,9 @@ async def delete_blobs( # pylint: disable=arguments-differ and retains the blobs or snapshots for specified number of days. After specified number of days, blobs' data is removed from the service during garbage collection. Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]` - Soft-deleted blobs or snapshots can be restored using :func:`~BlobClient.undelete()` + Soft-deleted blobs or snapshots can be restored using :func:`~azure.storage.blob.aio.BlobClient.undelete()` + + The maximum number of blobs that can be deleted in a single request is 256. :param blobs: The blobs to delete. This can be a single blob, or multiple values can @@ -903,6 +1342,8 @@ async def delete_blobs( # pylint: disable=arguments-differ key: 'name', value type: str snapshot you want to delete: key: 'snapshot', value type: str + version id: + key: 'version_id', value type: str whether to delete snapshots when deleting blob: key: 'delete_snapshots', value: 'include' or 'only' if the blob modified or not: @@ -918,7 +1359,7 @@ async def delete_blobs( # pylint: disable=arguments-differ timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: Union[str, Dict[str, Any], BlobProperties] :keyword str delete_snapshots: Required if a blob has associated snapshots. Values include: - "only": Deletes only the blobs snapshots. @@ -946,7 +1387,11 @@ async def delete_blobs( # pylint: disable=arguments-differ is raised even if there is a single operation failure. For optimal performance, this should be set to False :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] @@ -960,24 +1405,33 @@ async def delete_blobs( # pylint: disable=arguments-differ :caption: Deleting multiple blobs.
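+ + Illustrative batch call (an editorial sketch, not part of the vendored source; assumes an authenticated async ``ContainerClient`` named ``container`` and two hypothetical blobs, using only the dict keys and keywords documented above):: + + await container.delete_blobs( + "blob-one", + {"name": "blob-two", "delete_snapshots": "include"}, + raise_on_any_failure=False + )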
""" if len(blobs) == 0: - return iter(list()) - - reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs) + return AsyncList([]) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + + reqs, options = _generate_delete_blobs_options( + self._query_str, + self.container_name, + self._client, + *blobs, + **kwargs + ) - return await self._batch_send(*reqs, **options) + return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) - @distributed_trace + @distributed_trace_async async def set_standard_blob_tier_blobs( - self, - standard_blob_tier: Union[str, 'StandardBlobTier'], - *blobs: List[Union[str, BlobProperties, dict]], - **kwargs + self, standard_blob_tier: Union[str, 'StandardBlobTier'], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any ) -> AsyncIterator[AsyncHttpResponse]: """This operation sets the tier on block blobs. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's ETag. + The maximum number of blobs that can be updated in a single request is 256. + :param standard_blob_tier: Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool', 'Archive'. The hot tier is optimized for storing data that is accessed @@ -997,6 +1451,7 @@ async def set_standard_blob_tier_blobs( .. note:: When the blob type is dict, here's a list of keys, value rules. + blob name: key: 'name', value type: str standard blob tier: @@ -1010,7 +1465,7 @@ async def set_standard_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob :keyword str if_tags_match_condition: @@ -1020,7 +1475,11 @@ async def set_standard_blob_tier_blobs( .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. 
For optimal performance, @@ -1028,19 +1487,28 @@ async def set_standard_blob_tier_blobs( :return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] """ - reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + standard_blob_tier, + self._client, + *blobs, + **kwargs) - return await self._batch_send(*reqs, **options) + return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) - @distributed_trace + @distributed_trace_async async def set_premium_page_blob_tier_blobs( - self, - premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'], - *blobs: List[Union[str, BlobProperties, dict]], - **kwargs + self, premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'], + *blobs: Union[str, Dict[str, Any], BlobProperties], + **kwargs: Any ) -> AsyncIterator[AsyncHttpResponse]: """Sets the page blob tiers on the blobs. This API is only supported for page blobs on premium accounts. + The maximum number of blobs that can be updated in a single request is 256. + :param premium_page_blob_tier: A page blob tier value to set on all blobs to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on @@ -1066,11 +1534,13 @@ async def set_premium_page_blob_tier_blobs( timeout for subrequest: key: 'timeout', value type: int - :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :type blobs: str or dict(str, Any) or ~azure.storage.blob.BlobProperties :keyword int timeout: - The timeout parameter is expressed in seconds. This method may make - multiple calls to the Azure service and the timeout will apply to - each call individually. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :keyword bool raise_on_any_failure: This is a boolean param which defaults to True. When this is set, an exception is raised even if there is a single operation failure. For optimal performance, @@ -1078,25 +1548,35 @@ async def set_premium_page_blob_tier_blobs( :return: An async iterator of responses, one for each blob in order :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] """ - reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs) + if self._is_localhost: + kwargs['url_prepend'] = self.account_name + reqs, options = _generate_set_tiers_options( + self._query_str, + self.container_name, + premium_page_blob_tier, + self._client, + *blobs, + **kwargs) - return await self._batch_send(*reqs, **options) + return cast(AsyncIterator[AsyncHttpResponse], await self._batch_send(*reqs, **options)) def get_blob_client( - self, blob, # type: Union[BlobProperties, str] - snapshot=None # type: str - ): - # type: (...) -> BlobClient + self, blob: str, + snapshot: Optional[str] = None, + *, + version_id: Optional[str] = None + ) -> BlobClient: """Get a client to interact with the specified blob. The blob need not already exist. - :param blob: + :param str blob: The blob with which to interact.
- :type blob: str or ~azure.storage.blob.BlobProperties :param str snapshot: The optional blob snapshot on which to operate. This can be the snapshot ID string or the response returned from :func:`~BlobClient.create_snapshot()`. + :keyword str version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. :returns: A BlobClient. :rtype: ~azure.storage.blob.aio.BlobClient @@ -1109,14 +1589,23 @@ def get_blob_client( :dedent: 12 :caption: Get the blob client. """ - blob_name = _get_blob_name(blob) + if isinstance(blob, BlobProperties): + warnings.warn( + "The use of a 'BlobProperties' instance for param blob is deprecated. " + + "Please use 'BlobProperties.name' or any other str input type instead.", + DeprecationWarning + ) + blob_name = blob.get('name') + else: + blob_name = blob _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable = protected-access ) return BlobClient( self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, credential=self.credential, api_version=self.api_version, _configuration=self._config, _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, - require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - key_resolver_function=self.key_resolver_function, loop=self._loop) + require_encryption=self.require_encryption, encryption_version=self.encryption_version, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + version_id=version_id) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_download_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_download_async.py index 3d5e5474d8d7..dab5afdca85d 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_download_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_download_async.py @@ -4,31 +4,54 @@ # license information. 
# -------------------------------------------------------------------------- # pylint: disable=invalid-overridden-method +# mypy: disable-error-code=override import asyncio +import codecs import sys -from io import BytesIO -from itertools import islice import warnings +from io import BytesIO, StringIO +from itertools import islice +from typing import ( + Any, AsyncIterator, Awaitable, + Generator, Callable, cast, Dict, + Generic, IO, Optional, overload, + Tuple, TypeVar, Union, TYPE_CHECKING +) + +from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError -from azure.core.exceptions import HttpResponseError -from .._shared.encryption import decrypt_blob from .._shared.request_handlers import validate_and_format_range_headers -from .._shared.response_handlers import process_storage_error, parse_length_from_content_range -from .._deserialize import get_page_ranges_result +from .._shared.response_handlers import parse_length_from_content_range, process_storage_error +from .._deserialize import deserialize_blob_properties, get_page_ranges_result from .._download import process_range_and_offset, _ChunkDownloader +from .._encryption import ( + adjust_blob_size_for_encryption, + decrypt_blob, + is_encryption_v2, + parse_encryption_data +) + +if TYPE_CHECKING: + from codecs import IncrementalDecoder + from .._encryption import _EncryptionData + from .._generated.aio import AzureBlobStorage + from .._models import BlobProperties + from .._shared.models import StorageConfiguration + + +T = TypeVar('T', bytes, str) -async def process_content(data, start_offset, end_offset, encryption): + +async def process_content(data: Any, start_offset: int, end_offset: int, encryption: Dict[str, Any]) -> bytes: if data is None: raise ValueError("Response cannot be None.") - try: - content = data.response.body() - except Exception as error: - raise HttpResponseError(message="Download stream interrupted.", response=data.response, error=error) + await data.response.read() + content = cast(bytes, data.response.content) if encryption.get('key') is not None or encryption.get('resolver') is not None: try: return decrypt_blob( - encryption.get('required'), + encryption.get('required') or False, encryption.get('key'), encryption.get('resolver'), content, @@ -39,153 +62,189 @@ async def process_content(data, start_offset, end_offset, encryption): raise HttpResponseError( message="Decryption failed.", response=data.response, - error=error) + error=error) from error return content class _AsyncChunkDownloader(_ChunkDownloader): - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(_AsyncChunkDownloader, self).__init__(**kwargs) - self.stream_lock = asyncio.Lock() if kwargs.get('parallel') else None - self.progress_lock = asyncio.Lock() if kwargs.get('parallel') else None + self.stream_lock_async = asyncio.Lock() if kwargs.get('parallel') else None + self.progress_lock_async = asyncio.Lock() if kwargs.get('parallel') else None - async def process_chunk(self, chunk_start): + async def process_chunk(self, chunk_start: int) -> None: chunk_start, chunk_end = self._calculate_range(chunk_start) - chunk_data = await self._download_chunk(chunk_start, chunk_end - 1) + chunk_data, _ = await self._download_chunk(chunk_start, chunk_end - 1) length = chunk_end - chunk_start if length > 0: await self._write_to_stream(chunk_data, chunk_start) await self._update_progress(length) - async def yield_chunk(self, chunk_start): + async def yield_chunk(self, chunk_start: int) -> Tuple[bytes, 
int]: chunk_start, chunk_end = self._calculate_range(chunk_start) return await self._download_chunk(chunk_start, chunk_end - 1) - async def _update_progress(self, length): - if self.progress_lock: - async with self.progress_lock: # pylint: disable=not-async-context-manager + async def _update_progress(self, length: int) -> None: + if self.progress_lock_async: + async with self.progress_lock_async: self.progress_total += length else: self.progress_total += length - async def _write_to_stream(self, chunk_data, chunk_start): - if self.stream_lock: - async with self.stream_lock: # pylint: disable=not-async-context-manager + if self.progress_hook: + await cast(Callable[[int, Optional[int]], Awaitable[Any]], self.progress_hook)( + self.progress_total, self.total_size) + + async def _write_to_stream(self, chunk_data: bytes, chunk_start: int) -> None: + if self.stream_lock_async: + async with self.stream_lock_async: self.stream.seek(self.stream_start + (chunk_start - self.start_index)) self.stream.write(chunk_data) else: self.stream.write(chunk_data) - async def _download_chunk(self, chunk_start, chunk_end): + async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes, int]: + if self.encryption_options is None: + raise ValueError("Required argument is missing: encryption_options") download_range, offset = process_range_and_offset( - chunk_start, chunk_end, chunk_end, self.encryption_options) + chunk_start, chunk_end, chunk_end, self.encryption_options, self.encryption_data + ) # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. # Do optimize and create empty chunk locally if condition is met. if self._do_optimize(download_range[0], download_range[1]): - chunk_data = b"\x00" * self.chunk_size + content_length = download_range[1] - download_range[0] + 1 + chunk_data = b"\x00" * content_length else: range_header, range_validation = validate_and_format_range_headers( download_range[0], download_range[1], check_content_md5=self.validate_content ) - try: - _, response = await self.client.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self.validate_content, - data_stream_total=self.total_size, - download_stream_current=self.progress_total, - **self.request_options - ) - except HttpResponseError as error: - process_storage_error(error) - chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = True + retry_total = 3 + while retry_active: + try: + _, response = await cast(Awaitable[Any], self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + )) + except HttpResponseError as error: + process_storage_error(error) + + try: + chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + await asyncio.sleep(1) + content_length = response.content_length # This makes sure that if_match is set so that we can validate # that subsequent downloads are to an unmodified blob if self.request_options.get('modified_access_conditions'): self.request_options['modified_access_conditions'].if_match = response.properties.etag - return chunk_data + return 
chunk_data, content_length class _AsyncChunkIterator(object): """Async iterator for chunks in blob download stream.""" - def __init__(self, size, content, downloader): + def __init__(self, size: int, content: bytes, downloader: Optional[_AsyncChunkDownloader], chunk_size: int) -> None: self.size = size + self._chunk_size = chunk_size self._current_content = content self._iter_downloader = downloader - self._iter_chunks = None - self._complete = (size == 0) + self._iter_chunks: Optional[Generator[int, None, None]] = None + self._complete = size == 0 - def __len__(self): + def __len__(self) -> int: return self.size - def __iter__(self): + def __iter__(self) -> None: raise TypeError("Async stream must be iterated asynchronously.") - def __aiter__(self): + def __aiter__(self) -> AsyncIterator[bytes]: return self - async def __anext__(self): - """Iterate through responses.""" + # Iterate through responses. + async def __anext__(self) -> bytes: if self._complete: raise StopAsyncIteration("Download complete") if not self._iter_downloader: - # If no iterator was supplied, the download completed with - # the initial GET, so we just return that data + # cut the data obtained from initial GET into chunks + if len(self._current_content) > self._chunk_size: + return self._get_chunk_data() self._complete = True return self._current_content if not self._iter_chunks: self._iter_chunks = self._iter_downloader.get_chunk_offsets() - else: - try: - chunk = next(self._iter_chunks) - except StopIteration: - raise StopAsyncIteration("Download complete") - self._current_content = await self._iter_downloader.yield_chunk(chunk) - return self._current_content + # initial GET result still has more than _chunk_size bytes of data + if len(self._current_content) >= self._chunk_size: + return self._get_chunk_data() + try: + chunk = next(self._iter_chunks) + self._current_content += (await self._iter_downloader.yield_chunk(chunk))[0] + except StopIteration as exc: + self._complete = True + # it's likely that there is some data left in self._current_content + if self._current_content: + return self._current_content + raise StopAsyncIteration("Download complete") from exc -class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes - """A streaming object to download from Azure Storage. + return self._get_chunk_data() - :ivar str name: - The name of the blob being downloaded. - :ivar str container: - The name of the container where the blob is. - :ivar ~azure.storage.blob.BlobProperties properties: - The properties of the blob being downloaded. If only a range of the data is being - downloaded, this will be reflected in the properties. - :ivar int size: - The size of the total data in the stream. This will be the byte range if specified, - otherwise the total size of the blob. + def _get_chunk_data(self) -> bytes: + chunk_data = self._current_content[: self._chunk_size] + self._current_content = self._current_content[self._chunk_size:] + return chunk_data + + +class StorageStreamDownloader(Generic[T]): # pylint: disable=too-many-instance-attributes + """ + A streaming object to download from Azure Storage. + """ + name: str + """The name of the blob being downloaded.""" + container: str + """The name of the container where the blob is.""" + properties: "BlobProperties" + """The properties of the blob being downloaded. If only a range of the data is being + downloaded, this will be reflected in the properties.""" + size: int + """The size of the total data in the stream.
This will be the byte range if specified, + otherwise the total size of the blob.""" + def __init__( - self, - clients=None, - config=None, - start_range=None, - end_range=None, - validate_content=None, - encryption_options=None, - max_concurrency=1, - name=None, - container=None, - encoding=None, - **kwargs - ): + self, + clients: "AzureBlobStorage" = None, # type: ignore [assignment] + config: "StorageConfiguration" = None, # type: ignore [assignment] + start_range: Optional[int] = None, + end_range: Optional[int] = None, + validate_content: bool = None, # type: ignore [assignment] + encryption_options: Dict[str, Any] = None, # type: ignore [assignment] + max_concurrency: int = 1, + name: str = None, # type: ignore [assignment] + container: str = None, # type: ignore [assignment] + encoding: Optional[str] = None, + download_cls: Optional[Callable] = None, + **kwargs: Any + ) -> None: self.name = name self.container = container - self.properties = None - self.size = None + self.size = 0 self._clients = clients self._config = config @@ -195,63 +254,98 @@ def __init__( self._encoding = encoding self._validate_content = validate_content self._encryption_options = encryption_options or {} + self._progress_hook = kwargs.pop('progress_hook', None) self._request_options = kwargs + self._response = None self._location_mode = None - self._download_complete = False - self._current_content = None - self._file_size = None + self._current_content: Union[str, bytes] = b'' + self._file_size = 0 self._non_empty_ranges = None - self._response = None + self._encryption_data: Optional["_EncryptionData"] = None + + # The content download offset, after any processing (decryption), in bytes + self._download_offset = 0 + # The raw download offset, before processing (decryption), in bytes + self._raw_download_offset = 0 + # The offset the stream has been read to in bytes or chars depending on mode + self._read_offset = 0 + # The offset into current_content that has been consumed in bytes or chars depending on mode + self._current_content_offset = 0 + + self._text_mode: Optional[bool] = None + self._decoder: Optional["IncrementalDecoder"] = None + # Whether the current content is the first chunk of download content or not + self._first_chunk = True + self._download_start = self._start_range or 0 + + # The cls is passed in via download_cls to avoid conflicting arg name with Generic.__new__ + # but needs to be changed to cls in the request options. + self._request_options['cls'] = download_cls + + def __len__(self): + return self.size + + async def _get_encryption_data_request(self) -> None: + # Save current request cls + download_cls = self._request_options.pop('cls', None) + # Adjust cls for get_properties + self._request_options['cls'] = deserialize_blob_properties + + properties = cast("BlobProperties", await self._clients.blob.get_properties(**self._request_options)) + # This will return None if there is no encryption metadata or there are parsing errors. + # That is acceptable here, the proper error will be caught and surfaced when attempting + # to decrypt the blob. + self._encryption_data = parse_encryption_data(properties.metadata) + + # Restore cls for download + self._request_options['cls'] = download_cls + + async def _setup(self) -> None: + if self._encryption_options.get("key") is not None or self._encryption_options.get("resolver") is not None: + await self._get_encryption_data_request() # The service only provides transactional MD5s for chunks under 4MB. 
# If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first # chunk so a transactional MD5 can be retrieved. - self._first_get_size = self._config.max_single_get_size if not self._validate_content \ - else self._config.max_chunk_get_size + first_get_size = ( + self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size + ) initial_request_start = self._start_range if self._start_range is not None else 0 - if self._end_range is not None and self._end_range - self._start_range < self._first_get_size: + if self._end_range is not None and self._end_range - initial_request_start < first_get_size: initial_request_end = self._end_range else: - initial_request_end = initial_request_start + self._first_get_size - 1 + initial_request_end = initial_request_start + first_get_size - 1 + # pylint: disable-next=attribute-defined-outside-init self._initial_range, self._initial_offset = process_range_and_offset( - initial_request_start, initial_request_end, self._end_range, self._encryption_options + initial_request_start, + initial_request_end, + self._end_range, + self._encryption_options, + self._encryption_data ) - def __len__(self): - return self.size - - async def _setup(self): self._response = await self._initial_request() - self.properties = self._response.properties + self.properties = cast("BlobProperties", self._response.properties) # type: ignore [attr-defined] self.properties.name = self.name self.properties.container = self.container - # Set the content length to the download size instead of the size of - # the last range + # Set the content length to the download size instead of the size of the last range self.properties.size = self.size - - # Overwrite the content range to the user requested range - self.properties.content_range = 'bytes {0}-{1}/{2}'.format( - self._start_range, - self._end_range, - self._file_size - ) + self.properties.content_range = (f"bytes {self._download_start}-" + f"{self._end_range if self._end_range is not None else self._file_size - 1}/" + f"{self._file_size}") # Overwrite the content MD5 as it is the MD5 for the last range instead # of the stored MD5 # TODO: Set to the stored MD5 when the service returns this - self.properties.content_md5 = None + self.properties.content_md5 = None # type: ignore [attr-defined] - if self.size == 0: - self._current_content = b"" - else: - self._current_content = await process_content( - self._response, - self._initial_offset[0], - self._initial_offset[1], - self._encryption_options - ) + @property + def _download_complete(self): + if is_encryption_v2(self._encryption_data): + return self._download_offset >= self.size + return self._raw_download_offset >= self.size async def _initial_request(self): range_header, range_validation = validate_and_format_range_headers( @@ -259,51 +353,80 @@ async def _initial_request(self): self._initial_range[1], start_range_required=False, end_range_required=False, - check_content_md5=self._validate_content) + check_content_md5=self._validate_content + ) - try: - location_mode, response = await self._clients.blob.download( - range=range_header, - range_get_content_md5=range_validation, - validate_content=self._validate_content, - data_stream_total=None, - download_stream_current=0, - **self._request_options) - - # Check the location we read from to ensure we use the same one - # for subsequent requests. 
- self._location_mode = location_mode - - # Parse the total file size and adjust the download size if ranges - # were specified - self._file_size = parse_length_from_content_range(response.properties.content_range) - if self._end_range is not None: - # Use the length unless it is over the end of the file - self.size = min(self._file_size, self._end_range - self._start_range + 1) - elif self._start_range is not None: - self.size = self._file_size - self._start_range - else: - self.size = self._file_size + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = cast(Tuple[Optional[str], Any], await self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options + )) + + # Check the location we read from to ensure we use the same one + # for subsequent requests. + self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._file_size is None: + raise ValueError("Required Content-Range response header is missing or malformed.") + # Remove any extra encryption data size from blob size + self._file_size = adjust_blob_size_for_encryption(self._file_size, self._encryption_data) + + if self._end_range is not None and self._start_range is not None: + # Use the length unless it is over the end of the file + self.size = min(self._file_size - self._start_range, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size - except HttpResponseError as error: - if self._start_range is None and error.response.status_code == 416: - # Get range will fail on an empty file. If the user did not - # request a range, do a regular get request in order to get - # any properties. - try: - _, response = await self._clients.blob.download( - validate_content=self._validate_content, - data_stream_total=0, - download_stream_current=0, - **self._request_options) - except HttpResponseError as error: + except HttpResponseError as error: + if self._start_range is None and error.response and error.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
+ try: + _, response = cast(Tuple[Optional[Any], Any], await self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options)) + except HttpResponseError as e: + process_storage_error(e) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: process_storage_error(error) - # Set the download size to empty - self.size = 0 - self._file_size = 0 - else: - process_storage_error(error) + try: + if self.size == 0: + self._current_content = b"" + else: + self._current_content = await process_content( + response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + retry_active = False + except (IncompleteReadError, HttpResponseError, DecodeError) as error: + retry_total -= 1 + if retry_total <= 0: + raise HttpResponseError(error, error=error) from error + await asyncio.sleep(1) + self._download_offset += len(self._current_content) + self._raw_download_offset += response.content_length # get page ranges to optimize downloading sparse page blob if response.properties.blob_type == 'PageBlob': @@ -313,106 +436,266 @@ async def _initial_request(self): except HttpResponseError: pass - # If the file is small, the download is complete at this point. - # If file size is large, download the rest of the file in chunks. - if response.properties.size != self.size: - # Lock on the etag. This can be overridden by the user by specifying '*' - if self._request_options.get('modified_access_conditions'): - if not self._request_options['modified_access_conditions'].if_match: - self._request_options['modified_access_conditions'].if_match = response.properties.etag - else: - self._download_complete = True + if not self._download_complete and self._request_options.get("modified_access_conditions"): + self._request_options["modified_access_conditions"].if_match = response.properties.etag + return response - def chunks(self): - """Iterate over chunks in the download stream. + def chunks(self) -> AsyncIterator[bytes]: + """ + Iterate over chunks in the download stream. Note, the iterator returned will + iterate over the entire download content, regardless of any data that was + previously read. + + NOTE: If the stream has been partially read, some data may be re-downloaded by the iterator. + + :returns: An async iterator of the chunks in the download stream. + :rtype: AsyncIterator[bytes] + + .. admonition:: Example: - :rtype: Iterable[bytes] + .. literalinclude:: ../samples/blob_samples_hello_world_async.py + :start-after: [START download_a_blob_in_chunk] + :end-before: [END download_a_blob_in_chunk] + :language: python + :dedent: 16 + :caption: Download a blob using chunks(). """ - if self.size == 0 or self._download_complete: - iter_downloader = None - else: - data_end = self._file_size - if self._end_range is not None: - # Use the length unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. chunks is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with chunks as only bytes are supported.") + + iter_downloader = None + # If we still have the first chunk buffered, use it. 
Otherwise, download all content again + if not self._first_chunk or not self._download_complete: + if self._first_chunk: + start = self._download_start + len(self._current_content) + current_progress = len(self._current_content) + else: + start = self._download_start + current_progress = 0 + + end = self._download_start + self.size + iter_downloader = _AsyncChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # Start where the first download ended - end_range=data_end, - stream=None, - parallel=False, + current_progress=current_progress, + start_range=start, + end_range=end, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, - **self._request_options) + **self._request_options + ) + + initial_content = self._current_content if self._first_chunk else b'' return _AsyncChunkIterator( size=self.size, - content=self._current_content, - downloader=iter_downloader) + content=cast(bytes, initial_content), + downloader=iter_downloader, + chunk_size=self._config.max_chunk_get_size) - async def readall(self): - """Download the contents of this blob. + @overload + async def read(self, size: int = -1) -> T: + ... - This operation is blocking until all data is downloaded. - :rtype: bytes or str + @overload + async def read(self, *, chars: Optional[int] = None) -> T: + ... + + # pylint: disable-next=too-many-statements,too-many-branches + async def read(self, size: int = -1, *, chars: Optional[int] = None) -> T: """ - stream = BytesIO() - await self.readinto(stream) - data = stream.getvalue() - if self._encoding: - return data.decode(self._encoding) - return data + Read the specified bytes or chars from the stream. If `encoding` + was specified on `download_blob`, it is recommended to use the + chars parameter to read a specific number of chars to avoid decoding + errors. If size/chars is unspecified or negative all bytes will be read. + + :param int size: + The number of bytes to download from the stream. Leave unspecified + or set negative to download all bytes. + :keyword Optional[int] chars: + The number of chars to download from the stream. Leave unspecified + or set negative to download all chars. Note, this can only be used + when encoding is specified on `download_blob`. + :returns: + The requested data as bytes or a string if encoding was specified. If + the return value is empty, there is no more data to read. + :rtype: T + """ + if size > -1 and self._encoding: + warnings.warn( + "Size parameter specified with text encoding enabled. It is recommended to use chars " + "to read a specific number of characters instead." + ) + if size > -1 and chars is not None: + raise ValueError("Cannot specify both size and chars.") + if not self._encoding and chars is not None: + raise ValueError("Must specify encoding to read chars.") + if self._text_mode and size > -1: + raise ValueError("Stream has been partially read in text mode. Please use chars.") + if self._text_mode is False and chars is not None: + raise ValueError("Stream has been partially read in bytes mode. 
Please use size.") + + # Empty blob or already read to the end + if (size == 0 or chars == 0 or + (self._download_complete and self._current_content_offset >= len(self._current_content))): + return b'' if not self._encoding else '' # type: ignore [return-value] + + if not self._text_mode and chars is not None and self._encoding is not None: + self._text_mode = True + self._decoder = codecs.getincrementaldecoder(self._encoding)('strict') + self._current_content = self._decoder.decode( + cast(bytes, self._current_content), final=self._download_complete) + elif self._text_mode is None: + self._text_mode = False + + output_stream: Union[BytesIO, StringIO] + if self._text_mode: + output_stream = StringIO() + size = chars if chars else sys.maxsize + else: + output_stream = BytesIO() + size = size if size > 0 else sys.maxsize + readall = size == sys.maxsize + count = 0 + + # Start by reading from current_content + start = self._current_content_offset + length = min(len(self._current_content) - self._current_content_offset, size - count) + read = output_stream.write(self._current_content[start:start + length]) # type: ignore [arg-type] + + count += read + self._current_content_offset += read + self._read_offset += read + await self._check_and_report_progress() + + remaining = size - count + if remaining > 0 and not self._download_complete: + # Create a downloader than can download the rest of the file + start = self._download_start + self._download_offset + end = self._download_start + self.size + + parallel = self._max_concurrency > 1 + downloader = _AsyncChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._read_offset, + start_range=start, + end_range=end, + stream=output_stream, + parallel=parallel, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + encryption_data=self._encryption_data, + use_location=self._location_mode, + progress_hook=self._progress_hook, + **self._request_options + ) + self._first_chunk = False + + # When reading all data, have the downloader read everything into the stream. + # Else, read one chunk at a time (using the downloader as an iterator) until + # the requested size is reached. + chunks_iter = downloader.get_chunk_offsets() + if readall and not self._text_mode: + running_futures: Any = [ + asyncio.ensure_future(downloader.process_chunk(d)) + for d in islice(chunks_iter, 0, self._max_concurrency) + ] + while running_futures: + # Wait for some download to finish before adding a new one + done, running_futures = await asyncio.wait( + running_futures, return_when=asyncio.FIRST_COMPLETED) + try: + for task in done: + task.result() + except HttpResponseError as error: + process_storage_error(error) + try: + for _ in range(0, len(done)): + next_chunk = next(chunks_iter) + running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) + except StopIteration: + break + + if running_futures: + # Wait for the remaining downloads to finish + done, _running_futures = await asyncio.wait(running_futures) + try: + for task in done: + task.result() + except HttpResponseError as error: + process_storage_error(error) + + self._complete_read() - async def content_as_bytes(self, max_concurrency=1): - """Download the contents of this file. 
+ else: + while (chunk := next(chunks_iter, None)) is not None and remaining > 0: + chunk_data, content_length = await downloader.yield_chunk(chunk) + self._download_offset += len(chunk_data) + self._raw_download_offset += content_length + if self._text_mode and self._decoder is not None: + self._current_content = self._decoder.decode(chunk_data, final=self._download_complete) + else: + self._current_content = chunk_data + + if remaining < len(self._current_content): + read = output_stream.write(self._current_content[:remaining]) # type: ignore [arg-type] + else: + read = output_stream.write(self._current_content) # type: ignore [arg-type] + + self._current_content_offset = read + self._read_offset += read + remaining -= read + await self._check_and_report_progress() + + data = output_stream.getvalue() + if not self._text_mode and self._encoding: + try: + # This is technically incorrect to do, but we have it for backwards compatibility. + data = cast(bytes, data).decode(self._encoding) + except UnicodeDecodeError: + warnings.warn( + "Encountered a decoding error while decoding blob data from a partial read. " + "Try using the `chars` keyword instead to read in text mode." + ) + raise - This operation is blocking until all data is downloaded. + return data # type: ignore [return-value] - :keyword int max_concurrency: - The number of parallel connections with which to download. - :rtype: bytes + async def readall(self) -> T: """ - warnings.warn( - "content_as_bytes is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - return await self.readall() - - async def content_as_text(self, max_concurrency=1, encoding="UTF-8"): - """Download the contents of this blob, and decode as text. - + Read the entire contents of this blob. This operation is blocking until all data is downloaded. - :param int max_concurrency: - The number of parallel connections with which to download. - :param str encoding: - Test encoding to decode the downloaded bytes. Default is UTF-8. - :rtype: str + :returns: The requested data as bytes or a string if encoding was specified. + :rtype: T """ - warnings.warn( - "content_as_text is deprecated, use readall instead", - DeprecationWarning - ) - self._max_concurrency = max_concurrency - self._encoding = encoding - return await self.readall() + return await self.read() - async def readinto(self, stream): + async def readinto(self, stream: IO[bytes]) -> int: """Download the contents of this blob to a stream. - :param stream: + :param IO[bytes] stream: The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. :returns: The number of bytes read. :rtype: int """ + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. 
readinto is not supported in text mode.") + if self._encoding: + warnings.warn("Encoding is ignored with readinto as only byte streams are supported.") + # the stream must be seekable if parallel download is required parallel = self._max_concurrency > 1 if parallel: @@ -422,39 +705,55 @@ async def readinto(self, stream): try: stream.seek(stream.tell()) - except (NotImplementedError, AttributeError): - raise ValueError(error_message) - - # Write the content to the user stream - stream.write(self._current_content) + except (NotImplementedError, AttributeError) as exc: + raise ValueError(error_message) from exc + + # If some data has been streamed using `read`, only stream the remaining data + remaining_size = self.size - self._read_offset + # Already read to the end + if remaining_size <= 0: + return 0 + + # Write the current content to the user stream + current_remaining = len(self._current_content) - self._current_content_offset + start = self._current_content_offset + count = stream.write(cast(bytes, self._current_content[start:start + current_remaining])) + + self._current_content_offset += count + self._read_offset += count + if self._progress_hook: + await self._progress_hook(self._read_offset, self.size) + + # If all the data was already downloaded/buffered if self._download_complete: - return self.size + return remaining_size - data_end = self._file_size - if self._end_range is not None: - # Use the length unless it is over the end of the file - data_end = min(self._file_size, self._end_range + 1) + data_start = self._download_start + self._read_offset + data_end = self._download_start + self.size downloader = _AsyncChunkDownloader( client=self._clients.blob, non_empty_ranges=self._non_empty_ranges, total_size=self.size, chunk_size=self._config.max_chunk_get_size, - current_progress=self._first_get_size, - start_range=self._initial_range[1] + 1, # start where the first download ended + current_progress=self._read_offset, + start_range=data_start, end_range=data_end, stream=stream, parallel=parallel, validate_content=self._validate_content, encryption_options=self._encryption_options, + encryption_data=self._encryption_data, use_location=self._location_mode, - **self._request_options) + progress_hook=self._progress_hook, + **self._request_options + ) dl_tasks = downloader.get_chunk_offsets() - running_futures = [ + running_futures = { asyncio.ensure_future(downloader.process_chunk(d)) for d in islice(dl_tasks, 0, self._max_concurrency) - ] + } while running_futures: # Wait for some download to finish before adding a new one done, running_futures = await asyncio.wait( @@ -465,11 +764,11 @@ async def readinto(self, stream): except HttpResponseError as error: process_storage_error(error) try: - next_chunk = next(dl_tasks) + for _ in range(0, len(done)): + next_chunk = next(dl_tasks) + running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) except StopIteration: break - else: - running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) if running_futures: # Wait for the remaining downloads to finish @@ -479,12 +778,79 @@ async def readinto(self, stream): task.result() except HttpResponseError as error: process_storage_error(error) - return self.size + + self._complete_read() + return remaining_size + + def _complete_read(self): + """Adjusts all offsets to the end of the download.""" + self._download_offset = self.size + self._raw_download_offset = self.size + self._read_offset = self.size + self._current_content_offset = 
len(self._current_content) + + async def _check_and_report_progress(self): + """Reports progress if necessary.""" + # Only report progress at the end of each chunk and use download_offset to always report + # progress in terms of (approximate) byte count. + if self._progress_hook and self._current_content_offset == len(self._current_content): + await self._progress_hook(self._download_offset, self.size) + + async def content_as_bytes(self, max_concurrency=1): + """DEPRECATED: Download the contents of this blob. + + This operation is blocking until all data is downloaded. + + This method is deprecated, use :func:`readall` instead. + + :param int max_concurrency: + The number of parallel connections with which to download. + :returns: The contents of the blob as bytes. + :rtype: bytes + """ + warnings.warn( + "content_as_bytes is deprecated, use readall instead", + DeprecationWarning + ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "content_as_bytes is not supported in text mode.") + + self._max_concurrency = max_concurrency + return await self.readall() + + async def content_as_text(self, max_concurrency=1, encoding="UTF-8"): + """DEPRECATED: Download the contents of this blob, and decode as text. + + This operation is blocking until all data is downloaded. + + This method is deprecated, use :func:`readall` instead. + + :param int max_concurrency: + The number of parallel connections with which to download. + :param str encoding: + Text encoding to decode the downloaded bytes. Default is UTF-8. + :returns: The contents of the blob as a str. + :rtype: str + """ + warnings.warn( + "content_as_text is deprecated, use readall instead", + DeprecationWarning + ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "content_as_text is not supported in text mode.") + + self._max_concurrency = max_concurrency + self._encoding = encoding + return await self.readall() async def download_to_stream(self, stream, max_concurrency=1): - """Download the contents of this blob to a stream. + """DEPRECATED: Download the contents of this blob to a stream. - :param stream: + This method is deprecated, use :func:`readinto` instead. + + :param IO[T] stream: The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. @@ -497,6 +863,10 @@ async def download_to_stream(self, stream, max_concurrency=1): "download_to_stream is deprecated, use readinto instead", DeprecationWarning ) + if self._text_mode: + raise ValueError("Stream has been partially read in text mode. " + "download_to_stream is not supported in text mode.") + self._max_concurrency = max_concurrency await self.readinto(stream) return self.properties diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_encryption_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_encryption_async.py new file mode 100644 index 000000000000..97334d96da59 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_encryption_async.py @@ -0,0 +1,72 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License.
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import inspect +import sys +from io import BytesIO +from typing import IO + +from .._encryption import _GCM_REGION_DATA_LENGTH, encrypt_data_v2 + + +class GCMBlobEncryptionStream: + """ + An async stream that performs AES-GCM encryption on the given data as + it's streamed. Data is read and encrypted in regions. The stream + will use the same encryption key and will generate a guaranteed unique + nonce for each encryption region. + """ + def __init__( + self, content_encryption_key: bytes, + data_stream: IO[bytes], + ) -> None: + """ + :param bytes content_encryption_key: The encryption key to use. + :param IO[bytes] data_stream: The data stream to read data from. + """ + self.content_encryption_key = content_encryption_key + self.data_stream = data_stream + + self.offset = 0 + self.current = b'' + self.nonce_counter = 0 + + async def read(self, size: int = -1) -> bytes: + """ + Read data from the stream. Specify -1 to read all available data. + + :param int size: The amount of data to read. Defaults to -1 for all data. + :return: The bytes read. + :rtype: bytes + """ + result = BytesIO() + remaining = sys.maxsize if size == -1 else size + + while remaining > 0: + # Start by reading from current + if len(self.current) > 0: + read = min(remaining, len(self.current)) + result.write(self.current[:read]) + + self.current = self.current[read:] + self.offset += read + remaining -= read + + if remaining > 0: + # Read one region of data and encrypt it + data = self.data_stream.read(_GCM_REGION_DATA_LENGTH) + if inspect.isawaitable(data): + data = await data + + if len(data) == 0: + # No more data to read + break + + self.current = encrypt_data_v2(data, self.nonce_counter, self.content_encryption_key) + # IMPORTANT: Must increment the nonce each time. + self.nonce_counter += 1 + + return result.getvalue() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_lease_async.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_lease_async.py index 79e67337dd62..b5bfad95f53f 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_lease_async.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_lease_async.py @@ -3,51 +3,57 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=invalid-overridden-method +# pylint: disable=docstring-keyword-should-match-keyword-only -from typing import ( # pylint: disable=unused-import - Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, - TypeVar, TYPE_CHECKING -) +import uuid +from typing import Any, Optional, Union, TYPE_CHECKING from azure.core.exceptions import HttpResponseError from azure.core.tracing.decorator_async import distributed_trace_async -from .._shared.response_handlers import return_response_headers, process_storage_error +from .._shared.response_handlers import process_storage_error, return_response_headers from .._serialize import get_modify_conditions -from .._lease import BlobLeaseClient as LeaseClientBase if TYPE_CHECKING: + from azure.storage.blob.aio import BlobClient, ContainerClient from datetime import datetime - from .._generated.operations import BlobOperations, ContainerOperations - BlobClient = TypeVar("BlobClient") - ContainerClient = TypeVar("ContainerClient") -class BlobLeaseClient(LeaseClientBase): +class BlobLeaseClient(): # pylint: disable=client-accepts-api-version-keyword """Creates a new BlobLeaseClient. This client provides lease operations on a BlobClient or ContainerClient. - - :ivar str id: - The ID of the lease currently being maintained. This will be `None` if no - lease has yet been acquired. - :ivar str etag: - The ETag of the lease currently being maintained. This will be `None` if no - lease has yet been acquired or modified. - :ivar ~datetime.datetime last_modified: - The last modified timestamp of the lease currently being maintained. - This will be `None` if no lease has yet been acquired or modified. - - :param client: - The client of the blob or container to lease. - :type client: ~azure.storage.blob.aio.BlobClient or - ~azure.storage.blob.aio.ContainerClient - :param str lease_id: - A string representing the lease ID of an existing lease. This value does not - need to be specified in order to acquire a new lease, or break one. + :param client: The client of the blob or container to lease. + :type client: Union[BlobClient, ContainerClient] + :param lease_id: A string representing the lease ID of an existing lease. This value does not need to be + specified in order to acquire a new lease, or break one. + :type lease_id: Optional[str] """ + id: str + """The ID of the lease currently being maintained. This will be `None` if no + lease has yet been acquired.""" + etag: Optional[str] + """The ETag of the lease currently being maintained. This will be `None` if no + lease has yet been acquired or modified.""" + last_modified: Optional["datetime"] + """The last modified timestamp of the lease currently being maintained. 
+ This will be `None` if no lease has yet been acquired or modified.""" + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs + self, client: Union["BlobClient", "ContainerClient"], + lease_id: Optional[str] = None + ) -> None: + self.id = lease_id or str(uuid.uuid4()) + self.last_modified = None + self.etag = None + if hasattr(client, 'blob_name'): + self._client = client._client.blob + elif hasattr(client, 'container_name'): + self._client = client._client.container + else: + raise TypeError("Lease must use either BlobClient or ContainerClient.") + def __enter__(self): raise TypeError("Async lease must use 'async with'.") @@ -61,8 +67,7 @@ async def __aexit__(self, *args): await self.release() @distributed_trace_async - async def acquire(self, lease_duration=-1, **kwargs): - # type: (int, Any) -> None + async def acquire(self, lease_duration: int = -1, **kwargs: Any) -> None: """Requests a new lease. If the container does not have an active lease, the Blob service creates a @@ -97,12 +102,16 @@ async def acquire(self, lease_duration=-1, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.renew_lease( + response: Any = await self._client.renew_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -163,13 +175,12 @@ async def renew(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace_async - async def release(self, **kwargs): - # type: (Any) -> None + async def release(self, **kwargs: Any) -> None: """Release the lease. The lease may be released if the client lease id specified matches @@ -200,12 +211,16 @@ async def release(self, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.release_lease( + response: Any = await self._client.release_lease( lease_id=self.id, timeout=kwargs.pop('timeout', None), modified_access_conditions=mod_conditions, @@ -213,13 +228,12 @@ async def release(self, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace_async - async def change(self, proposed_lease_id, **kwargs): - # type: (str, Any) -> None + async def change(self, proposed_lease_id: str, **kwargs: Any) -> None: """Change the lease ID of an active lease. :param str proposed_lease_id: @@ -249,12 +263,16 @@ async def change(self, proposed_lease_id, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__.
:return: None """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.change_lease( + response: Any = await self._client.change_lease( lease_id=self.id, proposed_lease_id=proposed_lease_id, timeout=kwargs.pop('timeout', None), @@ -263,13 +281,12 @@ async def change(self, proposed_lease_id, **kwargs): **kwargs) except HttpResponseError as error: process_storage_error(error) - self.etag = response.get('etag') # type: str - self.id = response.get('lease_id') # type: str - self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') + self.id = response.get('lease_id') + self.last_modified = response.get('last_modified') @distributed_trace_async - async def break_lease(self, lease_break_period=None, **kwargs): - # type: (Optional[int], Any) -> int + async def break_lease(self, lease_break_period: Optional[int] = None, **kwargs: Any) -> int: """Break the lease, if the container or blob has an active lease. Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; @@ -308,13 +325,17 @@ async def break_lease(self, lease_break_period=None, **kwargs): .. versionadded:: 12.4.0 :keyword int timeout: - The timeout parameter is expressed in seconds. + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. :return: Approximate time remaining in the lease period, in seconds. :rtype: int """ mod_conditions = get_modify_conditions(kwargs) try: - response = await self._client.break_lease( + response: Any = await self._client.break_lease( timeout=kwargs.pop('timeout', None), break_period=lease_break_period, modified_access_conditions=mod_conditions, diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_list_blobs_helper.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_list_blobs_helper.py index 058572fd270d..1731a3186c40 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_list_blobs_helper.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_list_blobs_helper.py @@ -1,56 +1,65 @@ -# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information.
# -------------------------------------------------------------------------- -from azure.core.async_paging import AsyncPageIterator, AsyncItemPaged +from typing import Callable, List, Optional +from urllib.parse import unquote + +from azure.core.async_paging import AsyncItemPaged, AsyncPageIterator from azure.core.exceptions import HttpResponseError -from .._deserialize import get_blob_properties_from_generated_code -from .._models import BlobProperties + +from .._deserialize import ( + get_blob_properties_from_generated_code, + load_many_xml_nodes, + load_xml_int, + load_xml_string +) from .._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix +from .._models import BlobProperties from .._shared.models import DictMixin -from .._shared.response_handlers import return_context_and_deserialized, process_storage_error +from .._shared.response_handlers import ( + process_storage_error, + return_context_and_deserialized, + return_raw_deserialized +) class BlobPropertiesPaged(AsyncPageIterator): - """An Iterable of Blob properties. + """An Iterable of Blob properties.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.models.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - - :param callable command: Function to retrieve the next page of items. - :param str container: The container that the blobs are listed from. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str continuation_token: An opaque continuation token. - :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. 
- """ def __init__( - self, command, - container=None, - prefix=None, - results_per_page=None, - continuation_token=None, - delimiter=None, - location_mode=None): + self, command: Callable, + container: Optional[str] = None, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + delimiter: Optional[str] = None, + location_mode: Optional[str] = None, + ) -> None: super(BlobPropertiesPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -92,42 +101,115 @@ def _build_item(self, item): if isinstance(item, BlobProperties): return item if isinstance(item, BlobItemInternal): - blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access - blob.container = self.container + blob = get_blob_properties_from_generated_code(item) + blob.container = self.container # type: ignore [assignment] return blob return item +class BlobNamesPaged(AsyncPageIterator): + """An Iterable of Blob names.""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of blobs to retrieve per call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + delimiter: Optional[str] + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + + def __init__( + self, command: Callable, + container: Optional[str] = None, + prefix: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: + super(BlobNamesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.container = container + self.current_page = None + self.location_mode = location_mode + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + prefix=self.prefix, + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_raw_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.get('ServiceEndpoint') + self.prefix = load_xml_string(self._response, 'Prefix') + self.marker = load_xml_string(self._response, 'Marker') + self.results_per_page = load_xml_int(self._response, 'MaxResults') + self.container = self._response.get('ContainerName') + + blobs = load_many_xml_nodes(self._response, 'Blob', wrapper='Blobs') + self.current_page = [load_xml_string(blob, 'Name') for blob in blobs] + + next_marker = load_xml_string(self._response, 'NextMarker') + return next_marker or None, self.current_page 
+ + class BlobPrefix(AsyncItemPaged, DictMixin): """An Iterable of Blob properties. Returned from walk_blobs when a delimiter is used. - Can be thought of as a virtual blob directory. - - :ivar str name: The prefix, or "directory name" of the blob. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str marker: The continuation token of the current page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.models.BlobProperties) - :ivar str container: The container that the blobs are listed from. - :ivar str delimiter: A delimiting character used for hierarchy listing. - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only blobs whose names - begin with the specified prefix. - :param int results_per_page: The maximum number of blobs to retrieve per - call. - :param str marker: An opaque continuation token. - :param str delimiter: - Used to capture blobs whose names begin with the same substring up to - the appearance of the delimiter character. The delimiter may be a single - character or a string. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. - """ + Can be thought of as a virtual blob directory.""" + + name: str + """The prefix, or "directory name" of the blob.""" + service_endpoint: Optional[str] + """The service URL.""" + prefix: str + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + next_marker: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: str + """The location mode being used to list results. 
The available + options include "primary" and "secondary".""" + current_page: Optional[List[BlobProperties]] + """The current page of listed results.""" + delimiter: str + """A delimiting character used for hierarchy listing.""" + command: Callable + """Function to retrieve the next page of items.""" + container: str + """The name of the container.""" + def __init__(self, *args, **kwargs): super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) self.name = kwargs.get('prefix') @@ -154,10 +236,14 @@ async def _extract_data_cb(self, get_next_return): def _build_item(self, item): item = super(BlobPrefixPaged, self)._build_item(item) if isinstance(item, GenBlobPrefix): + if item.name.encoded: + name = unquote(item.name.content) + else: + name = item.name.content return BlobPrefix( self._command, container=self.container, - prefix=item.name, + prefix=name, results_per_page=self.results_per_page, location_mode=self.location_mode) return item diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_models.py index 05edd78e0d9d..27d1d8fa3c0b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_models.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_models.py @@ -3,38 +3,49 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=too-few-public-methods, too-many-instance-attributes -# pylint: disable=super-init-not-called, too-many-lines +# pylint: disable=too-few-public-methods + +from typing import Callable, List, Optional, TYPE_CHECKING from azure.core.async_paging import AsyncPageIterator from azure.core.exceptions import HttpResponseError -from .._deserialize import parse_tags - -from .._models import ContainerProperties, FilteredBlob -from .._shared.response_handlers import return_context_and_deserialized, process_storage_error +from .._deserialize import parse_tags from .._generated.models import FilterBlobItem +from .._models import ContainerProperties, FilteredBlob, parse_page_list +from .._shared.response_handlers import process_storage_error, return_context_and_deserialized + +if TYPE_CHECKING: + from .._models import BlobProperties class ContainerPropertiesPaged(AsyncPageIterator): """An Iterable of Container properties. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A container name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.models.ContainerProperties) - - :param callable command: Function to retrieve the next page of items. - :param str prefix: Filters the results to return only containers whose names + :param Callable command: Function to retrieve the next page of items. 
+ :param Optional[str] prefix: Filters the results to return only containers whose names begin with the specified prefix. - :param int results_per_page: The maximum number of container names to retrieve per + :param Optional[int] results_per_page: The maximum number of container names to retrieve per call. - :param str continuation_token: An opaque continuation token. + :param Optional[str] continuation_token: An opaque continuation token. """ + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A container name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: List[ContainerProperties] + """The current page of listed results.""" + def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): super(ContainerPropertiesPaged, self).__init__( get_next=self._get_next_cb, @@ -77,32 +88,41 @@ def _build_item(item): class FilteredBlobPaged(AsyncPageIterator): """An Iterable of Blob properties. - :ivar str service_endpoint: The service URL. - :ivar str prefix: A blob name prefix being used to filter the list. - :ivar str marker: The continuation token of the current page of results. - :ivar int results_per_page: The maximum number of results retrieved per API call. - :ivar str continuation_token: The continuation token to retrieve the next page of results. - :ivar str location_mode: The location mode being used to list results. The available - options include "primary" and "secondary". - :ivar current_page: The current page of listed results. - :vartype current_page: list(~azure.storage.blob.BlobProperties) - :ivar str container: The container that the blobs are listed from. - - :param callable command: Function to retrieve the next page of items. - :param str container: The name of the container. - :param int results_per_page: The maximum number of blobs to retrieve per + :param Callable command: Function to retrieve the next page of items. + :param Optional[str] container: The name of the container. + :param Optional[int] results_per_page: The maximum number of blobs to retrieve per call. - :param str continuation_token: An opaque continuation token. - :param location_mode: Specifies the location the request should be sent to. - This mode only applies for RA-GRS accounts which allow secondary read access. - Options include 'primary' or 'secondary'. + :param Optional[str] continuation_token: An opaque continuation token. + :param Optional[str] location_mode: + Specifies the location the request should be sent to. This mode only applies for RA-GRS accounts + which allow secondary read access. Options include 'primary' or 'secondary'. 
""" + + service_endpoint: Optional[str] + """The service URL.""" + prefix: Optional[str] + """A blob name prefix being used to filter the list.""" + marker: Optional[str] + """The continuation token of the current page of results.""" + results_per_page: Optional[int] + """The maximum number of results retrieved per API call.""" + continuation_token: Optional[str] + """The continuation token to retrieve the next page of results.""" + location_mode: Optional[str] + """The location mode being used to list results. The available + options include "primary" and "secondary".""" + current_page: Optional[List["BlobProperties"]] + """The current page of listed results.""" + container: Optional[str] + """The container that the blobs are listed from.""" + def __init__( - self, command, - container=None, - results_per_page=None, - continuation_token=None, - location_mode=None): + self, command: Callable, + container: Optional[str] = None, + results_per_page: Optional[int] = None, + continuation_token: Optional[str] = None, + location_mode: Optional[str] = None + ) -> None: super(FilteredBlobPaged, self).__init__( get_next=self._get_next_cb, extract_data=self._extract_data_cb, @@ -141,3 +161,39 @@ def _build_item(item): blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags) return blob return item + + +class PageRangePaged(AsyncPageIterator): + def __init__(self, command, results_per_page=None, continuation_token=None): + super(PageRangePaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.current_page = self._build_page(self._response) + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_page(response): + if not response: + raise StopIteration + + return parse_page_list(response) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_upload_helpers.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_upload_helpers.py index 36d1e4498e5e..794beee36e3b 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_upload_helpers.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/aio/_upload_helpers.py @@ -3,71 +3,87 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=no-self-use +import inspect from io import SEEK_SET, UnsupportedOperation -from typing import Optional, Union, Any, TypeVar, TYPE_CHECKING # pylint: disable=unused-import +from typing import Any, cast, Dict, IO, Optional, TypeVar, TYPE_CHECKING -import six -from azure.core.exceptions import ResourceModifiedError, HttpResponseError +from azure.core.exceptions import HttpResponseError, ResourceModifiedError -from .._shared.response_handlers import ( - process_storage_error, - return_response_headers) +from ._encryption_async import GCMBlobEncryptionStream +from .._encryption import ( + encrypt_blob, + get_adjusted_upload_size, + get_blob_encryptor_and_padder, + generate_blob_encryption_data, + _ENCRYPTION_PROTOCOL_V1, + _ENCRYPTION_PROTOCOL_V2 +) +from .._generated.models import ( + AppendPositionAccessConditions, + BlockLookupList, + ModifiedAccessConditions +) +from .._shared.response_handlers import process_storage_error, return_response_headers from .._shared.uploads_async import ( - upload_data_chunks, - upload_substream_blocks, + AppendBlobChunkUploader, BlockBlobChunkUploader, PageBlobChunkUploader, - AppendBlobChunkUploader) -from .._shared.encryption import generate_blob_encryption_data, encrypt_blob -from .._generated.models import ( - BlockLookupList, - AppendPositionAccessConditions, - ModifiedAccessConditions, + upload_data_chunks, + upload_substream_blocks ) -from .._upload_helpers import _convert_mod_error, _any_conditions +from .._upload_helpers import _any_conditions, _convert_mod_error if TYPE_CHECKING: - from datetime import datetime # pylint: disable=unused-import + from .._generated.aio.operations import AppendBlobOperations, BlockBlobOperations, PageBlobOperations + from .._shared.models import StorageConfiguration BlobLeaseClient = TypeVar("BlobLeaseClient") -async def upload_block_blob( # pylint: disable=too-many-locals - client=None, - data=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): +async def upload_block_blob( # pylint: disable=too-many-locals, too-many-statements + client: "BlockBlobOperations", + stream: IO, + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + validate_content: bool, + max_concurrency: Optional[int], + length: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' adjusted_count = length if (encryption_options.get('key') is not None) and (adjusted_count is not None): - adjusted_count += (16 - (length % 16)) + adjusted_count = get_adjusted_upload_size(adjusted_count, encryption_options['version']) blob_headers = kwargs.pop('blob_headers', None) tier = kwargs.pop('standard_blob_tier', None) blob_tags_string = kwargs.pop('blob_tags_string', None) + immutability_policy = kwargs.pop('immutability_policy', None) + immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time + immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode + legal_hold = kwargs.pop('legal_hold', None) + progress_hook = kwargs.pop('progress_hook', None) + # Do single put if the size is smaller than config.max_single_put_size if adjusted_count is not None and (adjusted_count <= 
blob_settings.max_single_put_size): - try: - data = data.read(length) - if not isinstance(data, six.binary_type): - raise TypeError('Blob data should be of type bytes.') - except AttributeError: - pass + data = stream.read(length or -1) + if inspect.isawaitable(data): + data = await data + if not isinstance(data, bytes): + raise TypeError('Blob data should be of type bytes.') + if encryption_options.get('key'): - encryption_data, data = encrypt_blob(data, encryption_options['key']) + if not isinstance(data, bytes): + raise TypeError('Blob data should be of type bytes.') + encryption_data, data = encrypt_blob(data, encryption_options['key'], encryption_options['version']) headers['x-ms-meta-encryptiondata'] = encryption_data - return await client.upload( - body=data, + + response = cast(Dict[str, Any], await client.upload( + body=data, # type: ignore [arg-type] content_length=adjusted_count, blob_http_headers=blob_headers, headers=headers, @@ -77,7 +93,15 @@ async def upload_block_blob( # pylint: disable=too-many-locals upload_stream_current=0, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, - **kwargs) + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs)) + + if progress_hook: + await progress_hook(adjusted_count, adjusted_count) + + return response use_original_upload_path = blob_settings.use_byte_buffer or \ validate_content or encryption_options.get('required') or \ @@ -86,20 +110,37 @@ async def upload_block_blob( # pylint: disable=too-many-locals not hasattr(stream, 'seek') or not hasattr(stream, 'tell') if use_original_upload_path: - if encryption_options.get('key'): - cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key']) - headers['x-ms-meta-encryptiondata'] = encryption_data - encryption_options['cek'] = cek - encryption_options['vector'] = iv + total_size = length + encryptor, padder = None, None + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_metadata = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_metadata + + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, True) + + # Adjust total_size for encryption V2 + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V2: + total_size = adjusted_count + # V2 wraps the data stream with an encryption stream + if cek is None: + raise ValueError("Generate encryption metadata failed.
'cek' is None.") + stream = GCMBlobEncryptionStream(cek, stream) # type: ignore [assignment] + block_ids = await upload_data_chunks( service=client, uploader_class=BlockBlobChunkUploader, - total_size=length, + total_size=total_size, chunk_size=blob_settings.max_block_size, max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, + encryptor=encryptor, + padder=padder, headers=headers, **kwargs ) @@ -112,13 +153,14 @@ async def upload_block_blob( # pylint: disable=too-many-locals max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, + progress_hook=progress_hook, headers=headers, **kwargs ) block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) block_lookup.latest = block_ids - return await client.commit_block_list( + return cast(Dict[str, Any], await client.commit_block_list( block_lookup, blob_http_headers=blob_headers, cls=return_response_headers, @@ -126,7 +168,10 @@ async def upload_block_blob( # pylint: disable=too-many-locals headers=headers, tier=tier.value if tier else None, blob_tags_string=blob_tags_string, - **kwargs) + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs)) except HttpResponseError as error: try: process_storage_error(error) @@ -137,48 +182,63 @@ async def upload_block_blob( # pylint: disable=too-many-locals async def upload_page_blob( - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "PageBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if not overwrite and not _any_conditions(**kwargs): kwargs['modified_access_conditions'].if_none_match = '*' if length is None or length < 0: raise ValueError("A content length must be specified for a Page Blob.") if length % 512 != 0: - raise ValueError("Invalid page blob size: {0}. " - "The size must be aligned to a 512-byte boundary.".format(length)) + raise ValueError(f"Invalid page blob size: {length}. 
" + "The size must be aligned to a 512-byte boundary.") + tier = None if kwargs.get('premium_page_blob_tier'): premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') try: - headers['x-ms-access-tier'] = premium_page_blob_tier.value + tier = premium_page_blob_tier.value except AttributeError: - headers['x-ms-access-tier'] = premium_page_blob_tier - if encryption_options and encryption_options.get('data'): - headers['x-ms-meta-encryptiondata'] = encryption_options['data'] + tier = premium_page_blob_tier + + if encryption_options and encryption_options.get('key'): + cek, iv, encryption_data = generate_blob_encryption_data( + encryption_options['key'], + encryption_options['version']) + headers['x-ms-meta-encryptiondata'] = encryption_data + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) - response = await client.create( + response = cast(Dict[str, Any], await client.create( content_length=0, blob_content_length=length, - blob_sequence_number=None, + blob_sequence_number=None, # type: ignore [arg-type] blob_http_headers=kwargs.pop('blob_headers', None), blob_tags_string=blob_tags_string, + tier=tier, cls=return_response_headers, headers=headers, - **kwargs) + **kwargs)) if length == 0: - return response + return cast(Dict[str, Any], response) + + if encryption_options and encryption_options.get('key'): + if encryption_options['version'] == _ENCRYPTION_PROTOCOL_V1: + encryptor, padder = get_blob_encryptor_and_padder(cek, iv, False) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) - return await upload_data_chunks( + return cast(Dict[str, Any], await upload_data_chunks( service=client, uploader_class=PageBlobChunkUploader, total_size=length, @@ -186,9 +246,9 @@ async def upload_page_blob( stream=stream, max_concurrency=max_concurrency, validate_content=validate_content, - encryption_options=encryption_options, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: try: @@ -200,16 +260,17 @@ async def upload_page_blob( async def upload_append_blob( # pylint: disable=unused-argument - client=None, - stream=None, - length=None, - overwrite=None, - headers=None, - validate_content=None, - max_concurrency=None, - blob_settings=None, - encryption_options=None, - **kwargs): + client: "AppendBlobOperations", + overwrite: bool, + encryption_options: Dict[str, Any], + blob_settings: "StorageConfiguration", + headers: Dict[str, Any], + stream: IO, + length: Optional[int] = None, + validate_content: Optional[bool] = None, + max_concurrency: Optional[int] = None, + **kwargs: Any +) -> Dict[str, Any]: try: if length == 0: return {} @@ -218,6 +279,7 @@ async def upload_append_blob( # pylint: disable=unused-argument max_size=kwargs.pop('maxsize_condition', None), append_position=None) blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) try: if overwrite: @@ -227,7 +289,7 @@ async def upload_append_blob( # pylint: disable=unused-argument headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return await upload_data_chunks( + return cast(Dict[str, Any], await upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -236,26 +298,27 @@ async def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, 
append_position_access_conditions=append_conditions, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: - if error.response.status_code != 404: + if error.response.status_code != 404: # type: ignore [union-attr] raise # rewind the request body if it is a stream if hasattr(stream, 'read'): try: # attempt to rewind the body to the initial position stream.seek(0, SEEK_SET) - except UnsupportedOperation: + except UnsupportedOperation as exc: # if body is not seekable, then retry would not work - raise error + raise error from exc await client.create( content_length=0, blob_http_headers=blob_headers, headers=headers, blob_tags_string=blob_tags_string, **kwargs) - return await upload_data_chunks( + return cast(Dict[str, Any], await upload_data_chunks( service=client, uploader_class=AppendBlobChunkUploader, total_size=length, @@ -264,7 +327,8 @@ async def upload_append_blob( # pylint: disable=unused-argument max_concurrency=max_concurrency, validate_content=validate_content, append_position_access_conditions=append_conditions, + progress_hook=progress_hook, headers=headers, - **kwargs) + **kwargs)) except HttpResponseError as error: process_storage_error(error) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/py.typed b/sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/storage/blob/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/setup.py b/sdk/eventhub/azure-eventhub-checkpointstoreblob/setup.py index f39b19007481..a4c62b0a3e1d 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/setup.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/setup.py @@ -74,9 +74,10 @@ }, install_requires=[ # dependencies for the vendored storage blob - "azure-core>=1.20.1", - "msrest>=0.6.18", + "azure-core>=1.30.0", "cryptography>=2.1.4", + "typing-extensions>=4.6.0", + "isodate>=0.6.1", # end of dependencies for the vendored storage blob 'azure-eventhub>=5.0.0', ]
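
With the vendored client now depending on azure-core>=1.30.0, isodate, and typing-extensions in place of msrest, a quick end-to-end smoke test is a reasonable way to confirm the checkpoint store still round-trips against the updated dependency set. A minimal sketch using the package's public API; the connection strings, event hub name, and container name below are placeholders.

from azure.eventhub import EventHubConsumerClient
from azure.eventhub.extensions.checkpointstoreblob import BlobCheckpointStore

# Placeholders: substitute real connection strings and names.
checkpoint_store = BlobCheckpointStore.from_connection_string(
    "<storage-connection-string>", container_name="eventhub-checkpoints")

client = EventHubConsumerClient.from_connection_string(
    "<eventhub-connection-string>",
    consumer_group="$Default",
    eventhub_name="<eventhub-name>",
    checkpoint_store=checkpoint_store)

def on_event(partition_context, event):
    # Each checkpoint update writes through the vendored blob client patched above.
    partition_context.update_checkpoint(event)

with client:
    client.receive(on_event=on_event, starting_position="-1")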