diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/CHANGELOG.md b/sdk/documentintelligence/azure-ai-documentintelligence/CHANGELOG.md
index 760a653e9e4c..f657032114ba 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/CHANGELOG.md
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/CHANGELOG.md
@@ -34,6 +34,7 @@
 
 ### Other Changes
 
+- Changed the default service API version to `2024-11-30`.
 - No need to pass `content-type` when analyze_request is a stream in `begin_analyze_document()` and `begin_classify_document()`.
 
 ## 1.0.0b4 (2024-09-05)
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/README.md b/sdk/documentintelligence/azure-ai-documentintelligence/README.md
index da31d417ab98..c7c8989c522f 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/README.md
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/README.md
@@ -951,7 +951,7 @@ client = DocumentIntelligenceAdministrationClient(endpoint=endpoint, credential=
 # The `send_request` method can send custom HTTP requests that share the client's existing pipeline,
 # Now let's use the `send_request` method to make a resource details fetching request.
 # The URL of the request should be absolute, and append the API version used for the request.
-request = HttpRequest(method="GET", url=f"{endpoint}/documentintelligence/info?api-version=2024-07-31-preview")
+request = HttpRequest(method="GET", url=f"{endpoint}/documentintelligence/info?api-version=2024-11-30")
 response = client.send_request(request)
 response.raise_for_status()
 response_body = response.json()
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/assets.json b/sdk/documentintelligence/azure-ai-documentintelligence/assets.json
index 12e24d0353f8..fd95bec75946 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/assets.json
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/assets.json
@@ -2,5 +2,5 @@
   "AssetsRepo": "Azure/azure-sdk-assets",
   "AssetsRepoPrefixPath": "python",
   "TagPrefix": "python/documentintelligence/azure-ai-documentintelligence",
-  "Tag": "python/documentintelligence/azure-ai-documentintelligence_faf458f6e7"
+  "Tag": "python/documentintelligence/azure-ai-documentintelligence_bfcdb2d242"
 }
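Taken together with the CHANGELOG entry above, analyze and classify calls no longer need an explicit `content_type` when the body is a stream. A minimal sketch of such a call, assuming the usual endpoint/key environment variables are set; the file path and model ID are placeholders:

    import os
    from azure.core.credentials import AzureKeyCredential
    from azure.ai.documentintelligence import DocumentIntelligenceClient

    client = DocumentIntelligenceClient(
        endpoint=os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"],
        credential=AzureKeyCredential(os.environ["DOCUMENTINTELLIGENCE_API_KEY"]),
    )

    with open("sample.pdf", "rb") as f:
        # No content_type keyword: the stream is detected and sent as application/octet-stream.
        poller = client.begin_analyze_document("prebuilt-layout", f)
        result = poller.result()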
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py
index f12de7bd2e3d..1335813c446f 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py
@@ -449,7 +449,7 @@ def begin_analyze_document(
 
         :param model_id: Unique document model name. Required.
         :type model_id: str
-        :param body: Analyze request parameters. Default value is None.
+        :param body: Analyze request parameters. Required.
         :type body: JSON
         :keyword pages: 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None.
         :paramtype pages: str
@@ -501,7 +501,7 @@ def begin_analyze_document(
 
         :param model_id: Unique document model name. Required.
         :type model_id: str
-        :param body: Analyze request parameters. Default value is None.
+        :param body: Analyze request parameters. Required.
         :type body: IO[bytes]
         :keyword pages: 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None.
         :paramtype pages: str
@@ -553,7 +553,7 @@ def begin_analyze_document(
         :param model_id: Unique document model name. Required.
         :type model_id: str
         :param body: Analyze request parameters. Is one of the following types:
-         AnalyzeDocumentRequest, JSON, IO[bytes] Default value is None.
+         AnalyzeDocumentRequest, JSON, IO[bytes] Required.
         :type body: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest or JSON or
          IO[bytes]
         :keyword pages: 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None.
@@ -585,15 +585,13 @@ def begin_analyze_document(
         _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
         _params = kwargs.pop("params", {}) or {}
 
-        content_type: Optional[str] = kwargs.pop(
-            "content_type", _headers.pop("content-type", "application/octet-stream")
-        )
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None))
         cls: ClsType[_models.AnalyzeResult] = kwargs.pop("cls", None)
         polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
         lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
         cont_token: Optional[str] = kwargs.pop("continuation_token", None)
         if cont_token is None:
-            if isinstance(body, io.BytesIO):
+            if isinstance(body, (bytes, io.BytesIO, io.BufferedReader)):
                 content_type = "application/octet-stream"
             raw_result = self._analyze_document_initial(
                 model_id=model_id,
@@ -683,10 +681,8 @@ def begin_classify_document(
         :raises ~azure.core.exceptions.HttpResponseError:
         """
         _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
-        content_type: Optional[str] = kwargs.pop(
-            "content_type", _headers.pop("content-type", "application/octet-stream")
-        )
-        if isinstance(body, io.BytesIO):
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None))
+        if isinstance(body, (bytes, io.BytesIO, io.BufferedReader)):
             content_type = "application/octet-stream"
         return super().begin_classify_document(  # type: ignore[arg-type, misc]
             classifier_id=classifier_id,
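The widened isinstance check above means raw bytes and buffered readers now get the same treatment as io.BytesIO, and content_type stays None otherwise. A hedged sketch of classification with in-memory bytes; the classifier ID and file name are made up:

    import os
    from azure.core.credentials import AzureKeyCredential
    from azure.ai.documentintelligence import DocumentIntelligenceClient

    client = DocumentIntelligenceClient(
        endpoint=os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"],
        credential=AzureKeyCredential(os.environ["DOCUMENTINTELLIGENCE_API_KEY"]),
    )

    with open("contract.pdf", "rb") as f:
        data = f.read()  # plain bytes now trigger the application/octet-stream default too

    poller = client.begin_classify_document("my-classifier", data)
    result = poller.result()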
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py
index 7ec98325b614..a77adcdfb601 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py
@@ -25,7 +25,7 @@ class DocumentIntelligenceClient(DIClientGenerated):
     :type credential: ~azure.core.credentials.AzureKeyCredential or
      ~azure.core.credentials.TokenCredential
     :keyword api_version: The API version to use for this operation. Default value is
-     "2024-07-31-preview". Note that overriding this default value may result in unsupported
+     "2024-11-30". Note that overriding this default value may result in unsupported
      behavior.
     :paramtype api_version: str
     :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
@@ -58,7 +58,7 @@ class DocumentIntelligenceAdministrationClient(DIAClientGenerated):
     :type credential: ~azure.core.credentials.AzureKeyCredential or
      ~azure.core.credentials.TokenCredential
     :keyword api_version: The API version to use for this operation. Default value is
-     "2024-07-31-preview". Note that overriding this default value may result in unsupported
+     "2024-11-30". Note that overriding this default value may result in unsupported
      behavior.
     :paramtype api_version: str
     :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py
index 8e28bbde04a7..fc2a83c94837 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py
@@ -566,15 +566,13 @@ async def begin_analyze_document(  # type: ignore[override]
         _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
         _params = kwargs.pop("params", {}) or {}
 
-        content_type: Optional[str] = kwargs.pop(
-            "content_type", _headers.pop("content-type", "application/octet-stream")
-        )
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None))
         cls: ClsType[_models.AnalyzeResult] = kwargs.pop("cls", None)
         polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
         lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
         cont_token: Optional[str] = kwargs.pop("continuation_token", None)
         if cont_token is None:
-            if isinstance(body, io.BytesIO):
+            if isinstance(body, (bytes, io.BytesIO, io.BufferedReader)):
                 content_type = "application/octet-stream"
             raw_result = await self._analyze_document_initial(
                 model_id=model_id,
@@ -665,10 +663,8 @@ async def begin_classify_document(  # type: ignore[override]
         :raises ~azure.core.exceptions.HttpResponseError:
         """
         _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
-        content_type: Optional[str] = kwargs.pop(
-            "content_type", _headers.pop("content-type", "application/octet-stream")
-        )
-        if isinstance(body, io.BytesIO):
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None))
+        if isinstance(body, (bytes, io.BytesIO, io.BufferedReader)):
             content_type = "application/octet-stream"
         return await super().begin_classify_document(  # type: ignore[arg-type, misc]
             classifier_id=classifier_id,
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py
index 821d56fb6891..471c1967baa0 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py
@@ -28,7 +28,7 @@ class DocumentIntelligenceClient(DIClientGenerated):
     :type credential: ~azure.core.credentials.AzureKeyCredential or
      ~azure.core.credentials_async.AsyncTokenCredential
     :keyword api_version: The API version to use for this operation. Default value is
-     "2024-07-31-preview". Note that overriding this default value may result in unsupported
+     "2024-11-30". Note that overriding this default value may result in unsupported
      behavior.
     :paramtype api_version: str
     """
@@ -59,7 +59,7 @@ class DocumentIntelligenceAdministrationClient(DIAClientGenerated):
     :type credential: ~azure.core.credentials.AzureKeyCredential or
      ~azure.core.credentials_async.AsyncTokenCredential
    :keyword api_version: The API version to use for this operation. Default value is
-     "2024-07-31-preview". Note that overriding this default value may result in unsupported
+     "2024-11-30". Note that overriding this default value may result in unsupported
      behavior.
     :paramtype api_version: str
     """
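All four client docstrings now advertise 2024-11-30 as the default; the api_version keyword still accepts an explicit value, with the caveat about unsupported behavior noted above. A small illustrative sketch:

    import os
    from azure.core.credentials import AzureKeyCredential
    from azure.ai.documentintelligence import DocumentIntelligenceClient

    # Omitting api_version picks up the new default, "2024-11-30".
    client = DocumentIntelligenceClient(
        endpoint=os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"],
        credential=AzureKeyCredential(os.environ["DOCUMENTINTELLIGENCE_API_KEY"]),
        api_version="2024-11-30",  # explicit pin; overriding the default may hit unsupported behavior
    )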
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py
index c385980aebd5..f7dd32510333 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py
@@ -6,45 +6,9 @@
 
 Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
 """
-from typing import List, Optional
-from ._models import (
-    AnalyzeDocumentRequest as GeneratedAnalyzeDocumentRequest,
-    ClassifyDocumentRequest as GeneratedClassifyDocumentRequest,
-)
-from .._model_base import rest_field
+from typing import List
 
-
-class AnalyzeDocumentRequest(GeneratedAnalyzeDocumentRequest):
-    """Document analysis parameters.
-
-    :ivar url_source: Document URL to analyze. Either url_source or bytes_source must be specified.
-    :vartype url_source: str
-    :ivar bytes_source: Document bytes to analyze. Either url_source or bytes_source must be specified.
-    :vartype bytes_source: bytes
-    """
-
-    bytes_source: Optional[bytes] = rest_field(name="base64Source", format="base64")
-    """Document bytes to analyze. Either url_source or bytes_source must be specified."""
-
-
-class ClassifyDocumentRequest(GeneratedClassifyDocumentRequest):
-    """Document classification parameters.
-
-    :ivar url_source: Document URL to classify. Either url_source or bytes_source must be
-     specified.
-    :vartype url_source: str
-    :ivar bytes_source: Document bytes to classify. Either url_source or bytes_source must be specified.
-    :vartype bytes_source: bytes
-    """
-
-    bytes_source: Optional[bytes] = rest_field(name="base64Source", format="base64")
-    """Document bytes to classify. Either url_source or bytes_source must be specified."""
-
-
-__all__: List[str] = [
-    "AnalyzeDocumentRequest",
-    "ClassifyDocumentRequest",
-]  # Add all objects you want publicly available to users at this package level
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
 
 
 def patch_sdk():
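With the hand-written wrappers deleted, AnalyzeDocumentRequest and ClassifyDocumentRequest are no longer re-exported from this patch module. A sketch of the expected usage, assuming the generated models keep the url_source/bytes_source shape shown in the removed docstrings; the file name and model ID are placeholders:

    import os
    from azure.core.credentials import AzureKeyCredential
    from azure.ai.documentintelligence import DocumentIntelligenceClient
    from azure.ai.documentintelligence.models import AnalyzeDocumentRequest

    client = DocumentIntelligenceClient(
        endpoint=os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"],
        credential=AzureKeyCredential(os.environ["DOCUMENTINTELLIGENCE_API_KEY"]),
    )

    with open("invoice.pdf", "rb") as f:
        # Assumption: bytes_source is carried by the generated model itself now.
        request = AnalyzeDocumentRequest(bytes_source=f.read())

    poller = client.begin_analyze_document("prebuilt-invoice", request)
    result = poller.result()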
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_batch_documents_async.py b/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_batch_documents_async.py
index 3db9e7b263e4..06edddca3813 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_batch_documents_async.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_batch_documents_async.py
@@ -52,8 +52,8 @@ async def analyze_batch_docs():
 
     endpoint = os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"]
     key = os.environ["DOCUMENTINTELLIGENCE_API_KEY"]
-    result_container_sas_url = os.environ["RESULT_SAS_URL"]
-    batch_training_data_container_sas_url = os.environ["TRAINING_DATA_SAS_URL"]
+    result_container_sas_url = os.environ["RESULT_CONTAINER_SAS_URL"]
+    batch_training_data_container_sas_url = os.environ["TRAINING_DATA_CONTAINER_SAS_URL"]
 
     document_intelligence_client = DocumentIntelligenceClient(endpoint=endpoint, credential=AzureKeyCredential(key))
 
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_receipts_async.py b/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_receipts_async.py
index ada9415bb115..76f583b8d066 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_receipts_async.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_analyze_receipts_async.py
@@ -29,6 +29,8 @@
 
 
 def format_price(price_dict):
+    if price_dict is None:
+        return "N/A"
     return "".join([f"{p}" for p in price_dict.values()])
 
 
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_send_request_async.py b/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_send_request_async.py
index 7db840983d3e..1446c45b4f98 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_send_request_async.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/samples/async_samples/sample_send_request_async.py
@@ -37,7 +37,7 @@ async def sample_send_request():
     # The `send_request` method can send custom HTTP requests that share the client's existing pipeline,
     # Now let's use the `send_request` method to make a resource details fetching request.
     # The URL of the request should be absolute, and append the API version used for the request.
-    request = HttpRequest(method="GET", url=f"{endpoint}/documentintelligence/info?api-version=2024-07-31-preview")
+    request = HttpRequest(method="GET", url=f"{endpoint}/documentintelligence/info?api-version=2024-11-30")
     response = await client.send_request(request)
     response.raise_for_status()
     response_body = response.json()
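The None guard added to format_price keeps the receipts samples from raising AttributeError on items that have no price. A quick illustration with made-up values:

    def format_price(price_dict):
        if price_dict is None:
            return "N/A"
        return "".join([f"{p}" for p in price_dict.values()])

    print(format_price({"currencySymbol": "$", "amount": 9.99}))  # "$9.99"
    print(format_price(None))  # "N/A"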
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_batch_documents.py b/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_batch_documents.py
index 2dbdab858c2d..5f0c9d51128f 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_batch_documents.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_batch_documents.py
@@ -51,8 +51,8 @@ def analyze_batch_docs():
 
     endpoint = os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"]
     key = os.environ["DOCUMENTINTELLIGENCE_API_KEY"]
-    result_container_sas_url = os.environ["RESULT_SAS_URL"]
-    batch_training_data_container_sas_url = os.environ["TRAINING_DATA_SAS_URL"]
+    result_container_sas_url = os.environ["RESULT_CONTAINER_SAS_URL"]
+    batch_training_data_container_sas_url = os.environ["TRAINING_DATA_CONTAINER_SAS_URL"]
 
     document_intelligence_client = DocumentIntelligenceClient(endpoint=endpoint, credential=AzureKeyCredential(key))
 
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_receipts.py b/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_receipts.py
index c37880b3fafb..492ddbe8eaa1 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_receipts.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_analyze_receipts.py
@@ -42,6 +42,8 @@ def analyze_receipts():
     from azure.ai.documentintelligence.models import AnalyzeResult
 
     def _format_price(price_dict):
+        if price_dict is None:
+            return "N/A"
         return "".join([f"{p}" for p in price_dict.values()])
 
     endpoint = os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"]
diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_send_request.py b/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_send_request.py
index 60a7c2c70e13..9e1d71765e1d 100644
--- a/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_send_request.py
+++ b/sdk/documentintelligence/azure-ai-documentintelligence/samples/sample_send_request.py
@@ -36,7 +36,7 @@ def sample_send_request():
     # The `send_request` method can send custom HTTP requests that share the client's existing pipeline,
     # Now let's use the `send_request` method to make a resource details fetching request.
     # The URL of the request should be absolute, and append the API version used for the request.
-    request = HttpRequest(method="GET", url=f"{endpoint}/documentintelligence/info?api-version=2024-07-31-preview")
+    request = HttpRequest(method="GET", url=f"{endpoint}/documentintelligence/info?api-version=2024-11-30")
     response = client.send_request(request)
     response.raise_for_status()
     response_body = response.json()
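Both batch-analysis samples now read the container SAS URLs from the renamed environment variables, which line up with the ARM template outputs added below. A sketch of supplying them for a local run; the URLs are placeholders:

    import os

    # Placeholder values; in CI these come from the test-resources.json outputs below.
    os.environ.setdefault("RESULT_CONTAINER_SAS_URL", "https://<account>.blob.core.windows.net/<container>?<sas>")
    os.environ.setdefault("TRAINING_DATA_CONTAINER_SAS_URL", "https://<account>.blob.core.windows.net/<container>?<sas>")

    result_container_sas_url = os.environ["RESULT_CONTAINER_SAS_URL"]
    batch_training_data_container_sas_url = os.environ["TRAINING_DATA_CONTAINER_SAS_URL"]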
diff --git a/sdk/documentintelligence/test-resources.json b/sdk/documentintelligence/test-resources.json
index ef541468f252..5e62bdf37574 100644
--- a/sdk/documentintelligence/test-resources.json
+++ b/sdk/documentintelligence/test-resources.json
@@ -186,6 +186,14 @@
       "type": "string",
       "value": "[concat(reference(parameters('blobResourceId'), '2019-06-01').primaryEndpoints.blob, parameters('batchTrainingDataContainer'), '?', listServiceSas(parameters('blobResourceId'), '2019-06-01', parameters('batchTrainingResultSasProperties')).serviceSasToken)]"
     },
+    "TRAINING_DATA_CONTAINER_SAS_URL": {
+      "type": "string",
+      "value": "[concat(reference(parameters('blobResourceId'), '2019-06-01').primaryEndpoints.blob, parameters('batchTrainingDataContainer'), '?', listServiceSas(parameters('blobResourceId'), '2019-06-01', parameters('batchTrainingSasProperties')).serviceSasToken)]"
+    },
+    "RESULT_CONTAINER_SAS_URL": {
+      "type": "string",
+      "value": "[concat(reference(parameters('blobResourceId'), '2019-06-01').primaryEndpoints.blob, parameters('batchTrainingDataContainer'), '?', listServiceSas(parameters('blobResourceId'), '2019-06-01', parameters('batchTrainingResultSasProperties')).serviceSasToken)]"
+    },
     "DOCUMENTINTELLIGENCE_BATCH_TRAINING_ASYNC_RESULT_DATA_CONTAINER_SAS_URL": {
       "type": "string",
       "value": "[concat(reference(parameters('blobResourceId'), '2019-06-01').primaryEndpoints.blob, parameters('batchTrainingDataContainer'), '?', listServiceSas(parameters('blobResourceId'), '2019-06-01', parameters('batchTrainingAsyncResultSasProperties')).serviceSasToken)]"