Commit

[EventHub] update checkpointstoreblob vendored storage for lint/typing changes (Azure#38757)

* update changelog

* update sync blob vendor

* update async ckpt blob vendor

* update async ckpt blob changelog

* added vendored storage deps to setup.py (see the sketch after this list)

* cp over latest storage updates
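
On the setup.py bullet: once azure-storage-blob is vendored, its runtime requirements must be declared by the host package itself. A minimal sketch of such a declaration follows; the version pins are illustrative assumptions, not values taken from this commit:

# Hypothetical setup.py fragment for the host package. The vendored
# azure-storage-blob code imports these libraries at runtime, so they move
# into the host package's own requirements. Pins below are illustrative only.
from setuptools import find_packages, setup

setup(
    name="azure-eventhub-checkpointstoreblob",
    packages=find_packages(exclude=["tests"]),
    install_requires=[
        "azure-eventhub>=5.0.0",  # existing dependency of the package
        "azure-core>=1.20.1",     # used throughout the vendored client
        "cryptography>=2.1.4",    # vendored client-side encryption support
        "isodate>=0.6.1",         # vendored serialization helper
    ],
)
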
swathipil authored Dec 21, 2024
1 parent bf50574 commit 737adb2
Showing 167 changed files with 67,067 additions and 41,455 deletions.
2 changes: 2 additions & 0 deletions .vscode/cspell.json

@@ -98,6 +98,8 @@
     "sdk/translation/azure-ai-translation-document/doc/**",
     "sdk/translation/azure-ai-translation-document/tests/glossaries-valid.csv",
     "sdk/storage/azure-storage-blob/**",
+    "sdk/eventhub/azure-eventhub-checkpointstoreblob/azure/eventhub/extensions/checkpointstoreblob/_vendor/**",
+    "sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/azure/eventhub/extensions/checkpointstoreblobaio/_vendor/**",
     "sdk/storage/azure-storage-extensions/**",
     "sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/**",
     "sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_virtual_cluster_utils/_restclient/**",

@@ -11,6 +11,11 @@ This version and all future versions will require Python 3.7+. Python 2.7 and 3.6 are no longer supported.
 - Fixed a bug where `BlobCheckpointStore.claim_ownership` mutated the `ownership_list` argument; it no longer mutates the argument.
 - Updated `azure-core` dependency to 1.20.1 to fix `cchardet` ImportError.

+### Other Changes
+
+- Updated vendor azure-storage-blob dependency to v12.24.0.
+- Fixed typing/linting issues and other bugs. See azure-storage-blob CHANGELOG.md for more info.
+
 ## 1.1.4 (2021-04-07)

 This version and all future versions will require Python 2.7 or Python 3.6+; Python 3.5 is no longer supported.

@@ -1 +1 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: str
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore

@@ -1 +1 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: str
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore

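Both one-line hunks above touch pkgutil-style namespace shims: `extend_path` returns a list of paths, so the old `# type: str` comment misdescribed the value, and the fix simply silences checkers with `# type: ignore`. A brief sketch of what such an `__init__.py` does:

# Sketch of a pkgutil-style namespace __init__.py, the pattern used at each
# level of the shared azure.eventhub.extensions package hierarchy. Every
# distribution that contributes subpackages ships this same line, and
# extend_path() appends matching package directories found on sys.path to
# __path__ (a list of directories, which is why "# type: str" was wrong).
__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
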
@@ -3,9 +3,11 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
+# pylint: disable=docstring-keyword-should-match-keyword-only

 import os

-from typing import Union, Iterable, AnyStr, IO, Any, Dict  # pylint: disable=unused-import
+from typing import Any, AnyStr, cast, Dict, IO, Iterable, Optional, Union, TYPE_CHECKING
 from ._version import VERSION
 from ._blob_client import BlobClient
 from ._container_client import ContainerClient
@@ -16,21 +18,21 @@
 from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas
 from ._shared.policies import ExponentialRetry, LinearRetry
 from ._shared.response_handlers import PartialBatchErrorException
-from ._shared.models import(
+from ._shared.models import (
     LocationMode,
     ResourceTypes,
     AccountSasPermissions,
     StorageErrorCode,
-    UserDelegationKey
-)
-from ._generated.models import (
-    RehydratePriority
+    UserDelegationKey,
+    Services
 )
+from ._generated.models import RehydratePriority
 from ._models import (
     BlobType,
     BlockState,
     StandardBlobTier,
     PremiumPageBlobTier,
+    BlobImmutabilityPolicyMode,
     SequenceNumberAction,
     PublicAccess,
     BlobAnalyticsLogging,
@@ -54,22 +56,27 @@
     BlobQueryError,
     DelimitedJsonDialect,
     DelimitedTextDialect,
+    QuickQueryDialect,
     ArrowDialect,
     ArrowType,
     ObjectReplicationPolicy,
-    ObjectReplicationRule
+    ObjectReplicationRule,
+    ImmutabilityPolicy,
 )
 from ._list_blobs_helper import BlobPrefix

+if TYPE_CHECKING:
+    from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential
+
 __version__ = VERSION


 def upload_blob_to_url(
-        blob_url,  # type: str
-        data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
-        credential=None,  # type: Any
-        **kwargs):
-    # type: (...) -> Dict[str, Any]
+    blob_url: str,
+    data: Union[Iterable[AnyStr], IO[AnyStr]],
+    credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None,  # pylint: disable=line-too-long
+    **kwargs: Any
+) -> Dict[str, Any]:
     """Upload data to a given URL
     The data will be uploaded as a block blob.
@@ -82,10 +89,17 @@ def upload_blob_to_url(
     :param credential:
         The credentials with which to authenticate. This is optional if the
         blob URL already has a SAS token. The value can be a SAS token string,
-        an instance of a AzureSasCredential from azure.core.credentials, an account
-        shared access key, or an instance of a TokenCredentials class from azure.identity.
+        an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
+        an account shared access key, or an instance of a TokenCredentials class from azure.identity.
         If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
         - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
+        If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
+        should be the storage account key.
+    :type credential:
+        ~azure.core.credentials.AzureNamedKeyCredential or
+        ~azure.core.credentials.AzureSasCredential or
+        ~azure.core.credentials.TokenCredential or
+        str or dict[str, str] or None
     :keyword bool overwrite:
         Whether the blob to be uploaded should overwrite the current data.
         If True, upload_blob_to_url will overwrite any existing data. If set to False, the
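
The docstring now admits AzureNamedKeyCredential alongside the other credential shapes; a brief hedged sketch of constructing each accepted type (account name, key, and token values are placeholders):

# Illustrative construction of the credential types the docstring accepts.
# Account name, key, and SAS token values are placeholders.
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential

# "name" is the storage account name and "key" the storage account key.
named_key = AzureNamedKeyCredential(name="mystorageaccount", key="<account-key>")

# Passing a SAS credential while the blob URL already carries a SAS token
# raises ValueError, as the docstring notes.
sas = AzureSasCredential("<sas-token>")

# A plain account key (str) or a SAS token string is also accepted.
account_key = "<account-key>"
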
@@ -112,21 +126,26 @@ def upload_blob_to_url(
     :rtype: dict(str, Any)
     """
     with BlobClient.from_blob_url(blob_url, credential=credential) as client:
-        return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)
+        return cast(BlobClient, client).upload_blob(data=data, blob_type=BlobType.BLOCKBLOB, **kwargs)
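
For context, a hedged usage sketch of the rewritten `upload_blob_to_url`, shown via the public azure.storage.blob package (the vendored copy mirrors this API); the account URL and SAS token are placeholders:

# Usage sketch for upload_blob_to_url; URL and SAS token are placeholders.
from azure.storage.blob import upload_blob_to_url

blob_url = "https://myaccount.blob.core.windows.net/mycontainer/hello.txt?<sas-token>"

# Data is uploaded as a block blob; overwrite=True replaces existing content.
result = upload_blob_to_url(blob_url, data=b"hello, world", overwrite=True)
print(result["etag"], result["last_modified"])
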


-def _download_to_stream(client, handle, **kwargs):
-    """Download data to specified open file-handle."""
+def _download_to_stream(client: BlobClient, handle: IO[bytes], **kwargs: Any) -> None:
+    """
+    Download data to specified open file-handle.
+    :param BlobClient client: The BlobClient to download with.
+    :param Stream handle: A Stream to download the data into.
+    """
     stream = client.download_blob(**kwargs)
     stream.readinto(handle)

 def download_blob_from_url(
-        blob_url,  # type: str
-        output,  # type: str
-        credential=None,  # type: Any
-        **kwargs):
-    # type: (...) -> None
+    blob_url: str,
+    output: Union[str, IO[bytes]],
+    credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None,  # pylint: disable=line-too-long
+    **kwargs: Any
+) -> None:
     """Download the contents of a blob to a local file or stream.
     :param str blob_url:
@@ -138,10 +157,17 @@ def download_blob_from_url(
     :param credential:
         The credentials with which to authenticate. This is optional if the
         blob URL already has a SAS token or the blob is public. The value can be a SAS token string,
-        an instance of a AzureSasCredential from azure.core.credentials,
+        an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
         an account shared access key, or an instance of a TokenCredentials class from azure.identity.
         If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
         - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
+        If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
+        should be the storage account key.
+    :type credential:
+        ~azure.core.credentials.AzureNamedKeyCredential or
+        ~azure.core.credentials.AzureSasCredential or
+        ~azure.core.credentials.TokenCredential or
+        str or dict[str, str] or None
     :keyword bool overwrite:
         Whether the local file should be overwritten if it already exists. The default value is
         `False` - in which case a ValueError will be raised if the file already exists. If set to
@@ -169,10 +195,10 @@ def download_blob_from_url(
     overwrite = kwargs.pop('overwrite', False)
     with BlobClient.from_blob_url(blob_url, credential=credential) as client:
         if hasattr(output, 'write'):
-            _download_to_stream(client, output, **kwargs)
+            _download_to_stream(client, cast(IO[bytes], output), **kwargs)
         else:
             if not overwrite and os.path.isfile(output):
-                raise ValueError("The file '{}' already exists.".format(output))
+                raise ValueError(f"The file '{output}' already exists.")
             with open(output, 'wb') as file_handle:
                 _download_to_stream(client, file_handle, **kwargs)
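
And a matching sketch for `download_blob_from_url`, exercising both branches shown above (file path vs. open stream); the URL and SAS token are placeholders:

# Usage sketch for download_blob_from_url; URL and SAS token are placeholders.
import io

from azure.storage.blob import download_blob_from_url

blob_url = "https://myaccount.blob.core.windows.net/mycontainer/hello.txt?<sas-token>"

# A str output takes the os.path.isfile/open branch; without overwrite=True,
# an existing file raises ValueError (now formatted with an f-string).
download_blob_from_url(blob_url, "hello.txt", overwrite=True)

# Any object with a write() method takes the _download_to_stream branch.
buffer = io.BytesIO()
download_blob_from_url(blob_url, buffer)
print(len(buffer.getvalue()), "bytes downloaded")
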

@@ -194,6 +220,8 @@ def download_blob_from_url(
     'StandardBlobTier',
     'PremiumPageBlobTier',
     'SequenceNumberAction',
+    'BlobImmutabilityPolicyMode',
+    'ImmutabilityPolicy',
     'PublicAccess',
     'BlobAnalyticsLogging',
     'Metrics',
@@ -210,6 +238,7 @@ def download_blob_from_url(
     'BlobBlock',
     'PageRange',
     'AccessPolicy',
+    'QuickQueryDialect',
     'ContainerSasPermissions',
     'BlobSasPermissions',
     'ResourceTypes',
@@ -229,5 +258,6 @@ def download_blob_from_url(
     'ArrowType',
     'BlobQueryReader',
     'ObjectReplicationPolicy',
-    'ObjectReplicationRule'
+    'ObjectReplicationRule',
+    'Services',
 ]
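
The signature rewrites in this file show the commit's recurring typing pattern: Python 2-era type comments replaced with inline annotations that pylint and mypy check natively. A minimal sketch of the transformation on a made-up function (not from this package):

from typing import Any, Dict, Optional

# Before (Python 2 compatible): types live in comments that the runtime
# ignores and that modern checkers handle poorly.
#
# def make_entry(name, count=None, **kwargs):
#     # type: (str, Optional[int], Any) -> Dict[str, Any]
#     ...

# After: inline annotations, the style applied across the vendored files.
def make_entry(name: str, count: Optional[int] = None, **kwargs: Any) -> Dict[str, Any]:
    """Illustrative only; not part of azure-storage-blob."""
    return {"name": name, "count": count, **kwargs}
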