diff --git a/CHANGES.rst b/CHANGES.rst index 22f14b51..e40529bd 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,9 @@ Changes ------- +2.5.0 (2023-03-06) +^^^^^^^^^^^^^^^^^^ +* bump botocore to 1.29.76 (thanks @jakob-keller #999) + 2.4.2 (2022-12-22) ^^^^^^^^^^^^^^^^^^ * fix retries (#988) diff --git a/aiobotocore/__init__.py b/aiobotocore/__init__.py index cb9dc8a9..e59b17b4 100644 --- a/aiobotocore/__init__.py +++ b/aiobotocore/__init__.py @@ -1 +1 @@ -__version__ = '2.4.2' +__version__ = '2.5.0' diff --git a/aiobotocore/args.py b/aiobotocore/args.py index ee2b9314..7276ed9f 100644 --- a/aiobotocore/args.py +++ b/aiobotocore/args.py @@ -6,6 +6,7 @@ from .config import AioConfig from .endpoint import AioEndpointCreator +from .regions import AioEndpointRulesetResolver from .signers import AioRequestSigner @@ -23,6 +24,9 @@ def get_client_args( scoped_config, client_config, endpoint_bridge, + auth_token=None, + endpoints_ruleset_data=None, + partition_data=None, ): final_args = self.compute_client_args( service_model, @@ -54,6 +58,7 @@ def get_client_args( endpoint_config['signature_version'], credentials, event_emitter, + auth_token, ) config_kwargs['s3'] = s3_config @@ -86,6 +91,21 @@ def get_client_args( protocol, parameter_validation ) response_parser = botocore.parsers.create_parser(protocol) + + ruleset_resolver = self._build_endpoint_resolver( + endpoints_ruleset_data, + partition_data, + client_config, + service_model, + endpoint_region_name, + region_name, + endpoint_url, + endpoint, + is_secure, + endpoint_bridge, + event_emitter, + ) + return { 'serializer': serializer, 'endpoint': endpoint, @@ -97,4 +117,67 @@ def get_client_args( 'client_config': new_config, 'partition': partition, 'exceptions_factory': self._exceptions_factory, + 'endpoint_ruleset_resolver': ruleset_resolver, } + + def _build_endpoint_resolver( + self, + endpoints_ruleset_data, + partition_data, + client_config, + service_model, + endpoint_region_name, + region_name, + endpoint_url, + endpoint, + is_secure, + endpoint_bridge, + event_emitter, + ): + if endpoints_ruleset_data is None: + return None + + # The legacy EndpointResolver is global to the session, but + # EndpointRulesetResolver is service-specific. Builtins for + # EndpointRulesetResolver must not be derived from the legacy + # endpoint resolver's output, including final_args, s3_config, + # etc. + s3_config_raw = self.compute_s3_config(client_config) or {} + service_name_raw = service_model.endpoint_prefix + # Maintain complex logic for s3 and sts endpoints for backwards + # compatibility. + if service_name_raw in ['s3', 'sts'] or region_name is None: + eprv2_region_name = endpoint_region_name + else: + eprv2_region_name = region_name + resolver_builtins = self.compute_endpoint_resolver_builtin_defaults( + region_name=eprv2_region_name, + service_name=service_name_raw, + s3_config=s3_config_raw, + endpoint_bridge=endpoint_bridge, + client_endpoint_url=endpoint_url, + legacy_endpoint_url=endpoint.host, + ) + # botocore does not support client context parameters generically + # for every service. Instead, the s3 config section entries are + # available as client context parameters. In the future, endpoint + # rulesets of services other than s3/s3control may require client + # context parameters. 
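Note on the resolver builtins computed above: the new AioEndpointRulesetResolver (aiobotocore/regions.py, added later in this diff) re-emits a before-endpoint-resolution.<service-id> event for every call, so handlers can override builtins such as "AWS::UseDualStack" per operation. A minimal sketch, not part of this patch:

    import asyncio

    from aiobotocore.session import get_session

    def force_dualstack(builtins, **kwargs):
        # Handlers mutate the builtins dict in place; keys are the ruleset
        # builtin names ("AWS::Region", "AWS::UseFIPS", "AWS::UseDualStack", ...).
        builtins['AWS::UseDualStack'] = True

    async def main():
        session = get_session()
        async with session.create_client('s3', region_name='us-west-2') as client:
            client.meta.events.register(
                'before-endpoint-resolution.s3', force_dualstack
            )
            await client.list_buckets()

    asyncio.run(main())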
+ client_context = ( + s3_config_raw if self._is_s3_service(service_name_raw) else {} + ) + sig_version = ( + client_config.signature_version + if client_config is not None + else None + ) + return AioEndpointRulesetResolver( + endpoint_ruleset_data=endpoints_ruleset_data, + partition_data=partition_data, + service_model=service_model, + builtins=resolver_builtins, + client_context=client_context, + event_emitter=event_emitter, + use_ssl=is_secure, + requested_auth_scheme=sig_version, + ) diff --git a/aiobotocore/client.py b/aiobotocore/client.py index 84b5f08b..7df766a7 100644 --- a/aiobotocore/client.py +++ b/aiobotocore/client.py @@ -4,13 +4,11 @@ ClientCreator, ClientEndpointBridge, PaginatorDocstring, - S3ArnParamHandler, - S3EndpointSetter, logger, resolve_checksum_context, ) from botocore.discovery import block_endpoint_discovery_required_operations -from botocore.exceptions import OperationNotPageableError +from botocore.exceptions import OperationNotPageableError, UnknownServiceError from botocore.history import get_global_history_recorder from botocore.hooks import first_non_none_response from botocore.utils import get_service_module_name @@ -22,7 +20,7 @@ from .httpchecksum import apply_request_checksum from .paginate import AioPaginator from .retries import adaptive, standard -from .utils import AioS3RegionRedirector +from .utils import AioS3RegionRedirectorv2 history_recorder = get_global_history_recorder() @@ -39,12 +37,27 @@ async def create_client( scoped_config=None, api_version=None, client_config=None, + auth_token=None, ): responses = await self._event_emitter.emit( 'choose-service-name', service_name=service_name ) service_name = first_non_none_response(responses, default=service_name) service_model = self._load_service_model(service_name, api_version) + try: + endpoints_ruleset_data = self._load_service_endpoints_ruleset( + service_name, api_version + ) + partition_data = self._loader.load_data('partitions') + except UnknownServiceError: + endpoints_ruleset_data = None + partition_data = None + logger.info( + 'No endpoints ruleset found for service %s, falling back to ' + 'legacy endpoint routing.', + service_name, + ) + cls = await self._create_client_class(service_name, service_model) region_name, client_config = self._normalize_fips_region( region_name, client_config @@ -55,6 +68,9 @@ async def create_client( client_config, service_signing_name=service_model.metadata.get('signingName'), config_store=self._config_store, + service_signature_version=service_model.metadata.get( + 'signatureVersion' + ), ) client_args = self._get_client_args( service_model, @@ -66,26 +82,20 @@ async def create_client( scoped_config, client_config, endpoint_bridge, + auth_token, + endpoints_ruleset_data, + partition_data, ) service_client = cls(**client_args) self._register_retries(service_client) - self._register_eventbridge_events( - service_client, endpoint_bridge, endpoint_url - ) self._register_s3_events( - service_client, - endpoint_bridge, - endpoint_url, - client_config, - scoped_config, - ) - self._register_s3_control_events( - service_client, - endpoint_bridge, - endpoint_url, - client_config, - scoped_config, + client=service_client, + endpoint_bridge=None, + endpoint_url=None, + client_config=client_config, + scoped_config=scoped_config, ) + self._register_s3_control_events(client=service_client) self._register_endpoint_discovery( service_client, endpoint_url, client_config ) @@ -222,17 +232,7 @@ def _register_s3_events( ): if client.meta.service_model.service_name != 's3': 
return - AioS3RegionRedirector(endpoint_bridge, client).register() - S3ArnParamHandler().register(client.meta.events) - use_fips_endpoint = client.meta.config.use_fips_endpoint - S3EndpointSetter( - endpoint_resolver=self._endpoint_resolver, - region=client.meta.region_name, - s3_config=client.meta.config.s3, - endpoint_url=endpoint_url, - partition=client.meta.partition, - use_fips_endpoint=use_fips_endpoint, - ).register(client.meta.events) + AioS3RegionRedirectorv2(None, client).register() self._set_s3_presign_signature_version( client.meta, client_config, scoped_config ) @@ -248,6 +248,9 @@ def _get_client_args( scoped_config, client_config, endpoint_bridge, + auth_token, + endpoints_ruleset_data, + partition_data, ): # This is a near copy of ClientCreator. What's replaced # is ClientArgsCreator->AioClientArgsCreator @@ -269,6 +272,9 @@ def _get_client_args( scoped_config, client_config, endpoint_bridge, + auth_token, + endpoints_ruleset_data, + partition_data, ) @@ -318,8 +324,15 @@ async def _make_api_call(self, operation_name, api_params): 'has_streaming_input': operation_model.has_streaming_input, 'auth_type': operation_model.auth_type, } + endpoint_url, additional_headers = await self._resolve_endpoint_ruleset( # noqa: BLK100 + operation_model, api_params, request_context + ) request_dict = await self._convert_to_request_dict( - api_params, operation_model, context=request_context + api_params=api_params, + operation_model=operation_model, + endpoint_url=endpoint_url, + context=request_context, + headers=additional_headers, ) resolve_checksum_context(request_dict, operation_model, api_params) @@ -378,7 +391,13 @@ async def _make_request( raise async def _convert_to_request_dict( - self, api_params, operation_model, context=None + self, + api_params, + operation_model, + endpoint_url, + context=None, + headers=None, + set_user_agent_header=True, ): api_params = await self._emit_api_params( api_params, operation_model, context @@ -388,10 +407,16 @@ async def _convert_to_request_dict( ) if not self._client_config.inject_host_prefix: request_dict.pop('host_prefix', None) + if headers is not None: + request_dict['headers'].update(headers) + if set_user_agent_header: + user_agent = self._client_config.user_agent + else: + user_agent = None prepare_request_dict( request_dict, - endpoint_url=self._endpoint.host, - user_agent=self._client_config.user_agent, + endpoint_url=endpoint_url, + user_agent=user_agent, context=context, ) return request_dict @@ -421,6 +446,56 @@ async def _emit_api_params(self, api_params, operation_model, context): ) return api_params + async def _resolve_endpoint_ruleset( + self, + operation_model, + params, + request_context, + ignore_signing_region=False, + ): + """Returns endpoint URL and list of additional headers returned from + EndpointRulesetResolver for the given operation and params. If the + ruleset resolver is not available, for example because the service has + no endpoints ruleset file, the legacy endpoint resolver's value is + returned. + + Use ignore_signing_region for generating presigned URLs or any other + situtation where the signing region information from the ruleset + resolver should be ignored. + + Returns tuple of URL and headers dictionary. Additionally, the + request_context dict is modified in place with any signing information + returned from the ruleset resolver. 
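To make the docstring above concrete: each call now resolves its own endpoint through the ruleset, so client.meta.endpoint_url is no longer the single source of truth for the request URL. A hedged sketch of the user-visible effect; the bucket and key names are invented:

    import asyncio

    from aiobotocore.config import AioConfig
    from aiobotocore.session import get_session

    async def main():
        session = get_session()
        config = AioConfig(use_dualstack_endpoint=True)
        async with session.create_client(
            's3', region_name='us-west-2', config=config
        ) as client:
            # The ruleset resolver maps this call to roughly
            # https://example-bucket.s3.dualstack.us-west-2.amazonaws.com/example-key
            # at request time, whatever client.meta.endpoint_url says.
            await client.get_object(Bucket='example-bucket', Key='example-key')

    asyncio.run(main())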
+ """ + if self._ruleset_resolver is None: + endpoint_url = self.meta.endpoint_url + additional_headers = {} + else: + endpoint_info = await self._ruleset_resolver.construct_endpoint( + operation_model=operation_model, + call_args=params, + request_context=request_context, + ) + endpoint_url = endpoint_info.url + additional_headers = endpoint_info.headers + # If authSchemes is present, overwrite default auth type and + # signing context derived from service model. + auth_schemes = endpoint_info.properties.get('authSchemes') + if auth_schemes is not None: + auth_info = self._ruleset_resolver.auth_schemes_to_signing_ctx( + auth_schemes + ) + auth_type, signing_context = auth_info + request_context['auth_type'] = auth_type + if 'region' in signing_context and ignore_signing_region: + del signing_context['region'] + if 'signing' in request_context: + request_context['signing'].update(signing_context) + else: + request_context['signing'] = signing_context + + return endpoint_url, additional_headers + def get_paginator(self, operation_name): """Create a paginator for an operation. diff --git a/aiobotocore/credentials.py b/aiobotocore/credentials.py index d498e204..440e2d4a 100644 --- a/aiobotocore/credentials.py +++ b/aiobotocore/credentials.py @@ -53,6 +53,7 @@ from dateutil.tz import tzutc from aiobotocore.config import AioConfig +from aiobotocore.tokens import AioSSOTokenProvider from aiobotocore.utils import ( AioContainerMetadataFetcher, AioInstanceMetadataFetcher, @@ -191,6 +192,11 @@ def _create_sso_provider(self, profile_name): profile_name=profile_name, cache=self._cache, token_cache=self._sso_token_cache, + token_provider=AioSSOTokenProvider( + self._session, + cache=self._sso_token_cache, + profile_name=profile_name, + ), ) @@ -537,7 +543,7 @@ async def load(self): metadata = await fetcher.retrieve_iam_role_credentials() if not metadata: return None - logger.debug( + logger.info( 'Found credentials from IAM Role: %s', metadata['role_name'] ) @@ -974,6 +980,8 @@ def __init__( token_loader=None, cache=None, expiry_window_seconds=None, + token_provider=None, + sso_session_name=None, ): self._client_creator = client_creator self._sso_region = sso_region @@ -981,14 +989,19 @@ def __init__( self._account_id = account_id self._start_url = start_url self._token_loader = token_loader + self._token_provider = token_provider + self._sso_session_name = sso_session_name super().__init__(cache, expiry_window_seconds) def _create_cache_key(self): args = { - 'startUrl': self._start_url, 'roleName': self._role_name, 'accountId': self._account_id, } + if self._sso_session_name: + args['sessionName'] = self._sso_session_name + else: + args['startUrl'] = self._start_url args = json.dumps(args, sort_keys=True, separators=(',', ':')) argument_hash = sha1(args.encode('utf-8')).hexdigest() @@ -1007,10 +1020,16 @@ async def _get_credentials(self): region_name=self._sso_region, ) async with self._client_creator('sso', config=config) as client: + if self._token_provider: + initial_token_data = self._token_provider.load_token() + token = (await initial_token_data.get_frozen_token()).token + else: + token = self._token_loader(self._start_url)['accessToken'] + kwargs = { 'roleName': self._role_name, 'accountId': self._account_id, - 'accessToken': self._token_loader(self._start_url), + 'accessToken': token, } try: response = await client.get_role_credentials(**kwargs) @@ -1038,15 +1057,20 @@ async def load(self): if not sso_config: return None - sso_fetcher = AioSSOCredentialFetcher( - sso_config['sso_start_url'], 
- sso_config['sso_region'], - sso_config['sso_role_name'], - sso_config['sso_account_id'], - self._client_creator, - token_loader=SSOTokenLoader(cache=self._token_cache), - cache=self.cache, - ) + fetcher_kwargs = { + 'start_url': sso_config['sso_start_url'], + 'sso_region': sso_config['sso_region'], + 'role_name': sso_config['sso_role_name'], + 'account_id': sso_config['sso_account_id'], + 'client_creator': self._client_creator, + 'token_loader': SSOTokenLoader(cache=self._token_cache), + 'cache': self.cache, + } + if 'sso_session' in sso_config: + fetcher_kwargs['sso_session_name'] = sso_config['sso_session'] + fetcher_kwargs['token_provider'] = self._token_provider + + sso_fetcher = AioSSOCredentialFetcher(**fetcher_kwargs) return AioDeferredRefreshableCredentials( method=self.METHOD, diff --git a/aiobotocore/httpchecksum.py b/aiobotocore/httpchecksum.py index 9b06dc40..1b99a704 100644 --- a/aiobotocore/httpchecksum.py +++ b/aiobotocore/httpchecksum.py @@ -141,7 +141,12 @@ def _apply_request_trailer_checksum(request): # Cannot set this as aiohttp complains headers["Transfer-Encoding"] = "chunked" - headers["Content-Encoding"] = "aws-chunked" + if "Content-Encoding" in headers: + # We need to preserve the existing content encoding and add + # aws-chunked as a new content encoding. + headers["Content-Encoding"] += ",aws-chunked" + else: + headers["Content-Encoding"] = "aws-chunked" headers["X-Amz-Trailer"] = location_name content_length = determine_content_length(body) diff --git a/aiobotocore/regions.py b/aiobotocore/regions.py new file mode 100644 index 00000000..b91b95dd --- /dev/null +++ b/aiobotocore/regions.py @@ -0,0 +1,107 @@ +import copy +import logging + +from botocore.exceptions import EndpointProviderError +from botocore.regions import EndpointRulesetResolver + +LOG = logging.getLogger(__name__) + + +class AioEndpointRulesetResolver(EndpointRulesetResolver): + async def construct_endpoint( + self, + operation_model, + call_args, + request_context, + ): + """Invokes the provider with params defined in the service's ruleset""" + if call_args is None: + call_args = {} + + if request_context is None: + request_context = {} + + provider_params = await self._get_provider_params( + operation_model, call_args, request_context + ) + LOG.debug( + 'Calling endpoint provider with parameters: %s' % provider_params + ) + try: + provider_result = self._provider.resolve_endpoint( + **provider_params + ) + except EndpointProviderError as ex: + botocore_exception = self.ruleset_error_to_botocore_exception( + ex, provider_params + ) + if botocore_exception is None: + raise + else: + raise botocore_exception from ex + LOG.debug('Endpoint provider result: %s' % provider_result.url) + + # The endpoint provider does not support non-secure transport. + if not self._use_ssl and provider_result.url.startswith('https://'): + provider_result = provider_result._replace( + url=f'http://{provider_result.url[8:]}' + ) + + # Multi-valued headers are not supported in botocore. Replace the list + # of values returned for each header with just its first entry, + # dropping any additionally entries. + provider_result = provider_result._replace( + headers={ + key: val[0] for key, val in provider_result.headers.items() + } + ) + + return provider_result + + async def _get_provider_params( + self, operation_model, call_args, request_context + ): + """Resolve a value for each parameter defined in the service's ruleset + + The resolution order for parameter values is: + 1. 
Operation-specific static context values from the service definition + 2. Operation-specific dynamic context values from API parameters + 3. Client-specific context parameters + 4. Built-in values such as region, FIPS usage, ... + """ + provider_params = {} + # Builtin values can be customized for each operation by hooks + # subscribing to the ``before-endpoint-resolution.*`` event. + customized_builtins = await self._get_customized_builtins( + operation_model, call_args, request_context + ) + for param_name, param_def in self._param_definitions.items(): + param_val = self._resolve_param_from_context( + param_name=param_name, + operation_model=operation_model, + call_args=call_args, + ) + if param_val is None and param_def.builtin is not None: + param_val = self._resolve_param_as_builtin( + builtin_name=param_def.builtin, + builtins=customized_builtins, + ) + if param_val is not None: + provider_params[param_name] = param_val + + return provider_params + + async def _get_customized_builtins( + self, operation_model, call_args, request_context + ): + service_id = self._service_model.service_id.hyphenize() + customized_builtins = copy.copy(self._builtins) + # Handlers are expected to modify the builtins dict in place. + await self._event_emitter.emit( + 'before-endpoint-resolution.%s' % service_id, + builtins=customized_builtins, + model=operation_model, + params=call_args, + context=request_context, + ) + return customized_builtins diff --git a/aiobotocore/session.py b/aiobotocore/session.py index dfb88a7a..d400f764 100644 --- a/aiobotocore/session.py +++ b/aiobotocore/session.py @@ -14,6 +14,7 @@ from .credentials import AioCredentials, create_credential_resolver from .hooks import AioHierarchicalEmitter from .parsers import AioResponseParserFactory +from .tokens import create_token_resolver from .utils import AioIMDSRegionProvider @@ -47,6 +48,9 @@ def __init__( session_vars, event_hooks, include_builtin_handlers, profile ) + def _create_token_resolver(self): + return create_token_resolver(self) + def _create_credential_resolver(self): return create_credential_resolver( self, region_name=self._last_client_region_used @@ -167,6 +171,7 @@ async def _create_client( ) else: credentials = await self.get_credentials() + auth_token = self.get_auth_token() endpoint_resolver = self._get_internal_component('endpoint_resolver') exceptions_factory = self._get_internal_component('exceptions_factory') config_store = self.get_component('config_store') @@ -200,6 +205,7 @@ async def _create_client( scoped_config=self.get_scoped_config(), client_config=config, api_version=api_version, + auth_token=auth_token, ) monitor = self._get_internal_component('monitor') if monitor is not None: diff --git a/aiobotocore/signers.py b/aiobotocore/signers.py index 49c15f93..a3df9647 100644 --- a/aiobotocore/signers.py +++ b/aiobotocore/signers.py @@ -12,6 +12,7 @@ create_request_object, prepare_request_dict, ) +from botocore.utils import ArnParser class AioRequestSigner(RequestSigner): @@ -87,7 +88,12 @@ async def _choose_signer(self, operation_name, signing_type, context): } suffix = signing_type_suffix_map.get(signing_type, '') - signature_version = self._signature_version + # operation specific signing context takes precedent over client-level + # defaults + signature_version = context.get('auth_type') or self._signature_version + signing = context.get('signing', {}) + signing_name = signing.get('signing_name', self._signing_name) + region_name = signing.get('region', self._region_name) if ( signature_version is not 
botocore.UNSIGNED and not signature_version.endswith(suffix) @@ -98,8 +104,8 @@ async def _choose_signer(self, operation_name, signing_type, context): 'choose-signer.{}.{}'.format( self._service_id.hyphenize(), operation_name ), - signing_name=self._signing_name, - region_name=self._region_name, + signing_name=signing_name, + region_name=region_name, signature_version=signature_version, context=context, ) @@ -128,6 +134,13 @@ async def get_auth_instance( signature_version=signature_version ) + if cls.REQUIRES_TOKEN is True: + frozen_token = None + if self._auth_token is not None: + frozen_token = await self._auth_token.get_frozen_token() + auth = cls(frozen_token) + return auth + frozen_credentials = None if self._credentials is not None: frozen_credentials = ( @@ -305,7 +318,6 @@ async def generate_presigned_url( } request_signer = self._request_signer - serializer = self._serializer try: operation_name = self._PY_TO_OP_NAME[client_method] @@ -313,21 +325,27 @@ async def generate_presigned_url( raise UnknownClientMethodError(method_name=client_method) operation_model = self.meta.service_model.operation_model(operation_name) + bucket_is_arn = ArnParser.is_arn(params.get('Bucket', '')) + endpoint_url, additional_headers = await self._resolve_endpoint_ruleset( + operation_model, + params, + context, + ignore_signing_region=(not bucket_is_arn), + ) - params = await self._emit_api_params(params, operation_model, context) - - # Create a request dict based on the params to serialize. - request_dict = serializer.serialize_to_request(params, operation_model) + request_dict = await self._convert_to_request_dict( + api_params=params, + operation_model=operation_model, + endpoint_url=endpoint_url, + context=context, + headers=additional_headers, + set_user_agent_header=False, + ) # Switch out the http method if user specified it. if http_method is not None: request_dict['method'] = http_method - # Prepare the request dict by including the client's endpoint url. - prepare_request_dict( - request_dict, endpoint_url=self.meta.endpoint_url, context=context - ) - # Generate the presigned url. return await request_signer.generate_presigned_url( request_dict=request_dict, @@ -357,26 +375,32 @@ async def generate_presigned_post( if conditions is None: conditions = [] + context = { + 'is_presign_request': True, + 'use_global_endpoint': _should_use_global_endpoint(self), + } + post_presigner = AioS3PostPresigner(self._request_signer) - serializer = self._serializer # We choose the CreateBucket operation model because its url gets # serialized to what a presign post requires. operation_model = self.meta.service_model.operation_model('CreateBucket') - - # Create a request dict based on the params to serialize. - request_dict = serializer.serialize_to_request( - {'Bucket': bucket}, operation_model + params = {'Bucket': bucket} + bucket_is_arn = ArnParser.is_arn(params.get('Bucket', '')) + endpoint_url, additional_headers = await self._resolve_endpoint_ruleset( + operation_model, + params, + context, + ignore_signing_region=(not bucket_is_arn), ) - # Prepare the request dict by including the client's endpoint url. 
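Because generate_presigned_url above now feeds the ruleset-resolved endpoint into _convert_to_request_dict, presigned URLs come out in virtual-hosted form (see the updated expectation in tests/python3.8/boto_tests/test_signers.py further down). A minimal usage sketch with invented names:

    import asyncio

    from aiobotocore.session import get_session

    async def main():
        session = get_session()
        async with session.create_client('s3', region_name='us-east-1') as client:
            url = await client.generate_presigned_url(
                'get_object',
                Params={'Bucket': 'example-bucket', 'Key': 'example-key'},
                ExpiresIn=900,
            )
            # Roughly: https://example-bucket.s3.amazonaws.com/example-key?X-Amz-...
            print(url)

    asyncio.run(main())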
- prepare_request_dict( - request_dict, - endpoint_url=self.meta.endpoint_url, - context={ - 'is_presign_request': True, - 'use_global_endpoint': _should_use_global_endpoint(self), - }, + request_dict = await self._convert_to_request_dict( + api_params=params, + operation_model=operation_model, + endpoint_url=endpoint_url, + context=context, + headers=additional_headers, + set_user_agent_header=False, ) # Append that the bucket name to the list of conditions. diff --git a/aiobotocore/tokens.py b/aiobotocore/tokens.py new file mode 100644 index 00000000..5e6ca71b --- /dev/null +++ b/aiobotocore/tokens.py @@ -0,0 +1,160 @@ +import asyncio +import logging +from datetime import timedelta + +import dateutil.parser +from botocore.compat import total_seconds +from botocore.exceptions import ClientError, TokenRetrievalError +from botocore.tokens import ( + DeferredRefreshableToken, + FrozenAuthToken, + SSOTokenProvider, + TokenProviderChain, + _utc_now, +) + +logger = logging.getLogger(__name__) + + +def create_token_resolver(session): + providers = [ + AioSSOTokenProvider(session), + ] + return TokenProviderChain(providers=providers) + + +class AioDeferredRefreshableToken(DeferredRefreshableToken): + def __init__( + self, method, refresh_using, time_fetcher=_utc_now + ): # noqa: E501, lgtm [py/missing-call-to-init] + self._time_fetcher = time_fetcher + self._refresh_using = refresh_using + self.method = method + + # The frozen token is protected by this lock + self._refresh_lock = asyncio.Lock() + self._frozen_token = None + self._next_refresh = None + + async def get_frozen_token(self): + await self._refresh() + return self._frozen_token + + async def _refresh(self): + # If we don't need to refresh just return + refresh_type = self._should_refresh() + if not refresh_type: + return None + + # Block for refresh if we're in the mandatory refresh window + block_for_refresh = refresh_type == "mandatory" + if block_for_refresh or not self._refresh_lock.locked(): + async with self._refresh_lock: + await self._protected_refresh() + + async def _protected_refresh(self): + # This should only be called after acquiring the refresh lock + # Another task may have already refreshed, double check refresh + refresh_type = self._should_refresh() + if not refresh_type: + return None + + try: + now = self._time_fetcher() + self._next_refresh = now + timedelta(seconds=self._attempt_timeout) + self._frozen_token = await self._refresh_using() + except Exception: + logger.warning( + "Refreshing token failed during the %s refresh period.", + refresh_type, + exc_info=True, + ) + if refresh_type == "mandatory": + # This refresh was mandatory, error must be propagated back + raise + + if self._is_expired(): + # Fresh credentials should never be expired + raise TokenRetrievalError( + provider=self.method, + error_msg="Token has expired and refresh failed", + ) + + +class AioSSOTokenProvider(SSOTokenProvider): + async def _attempt_create_token(self, token): + response = await self._client.create_token( + grantType=self._GRANT_TYPE, + clientId=token["clientId"], + clientSecret=token["clientSecret"], + refreshToken=token["refreshToken"], + ) + expires_in = timedelta(seconds=response["expiresIn"]) + new_token = { + "startUrl": self._sso_config["sso_start_url"], + "region": self._sso_config["sso_region"], + "accessToken": response["accessToken"], + "expiresAt": self._now() + expires_in, + # Cache the registration alongside the token + "clientId": token["clientId"], + "clientSecret": token["clientSecret"], + 
"registrationExpiresAt": token["registrationExpiresAt"], + } + if "refreshToken" in response: + new_token["refreshToken"] = response["refreshToken"] + logger.info("SSO Token refresh succeeded") + return new_token + + async def _refresh_access_token(self, token): + keys = ( + "refreshToken", + "clientId", + "clientSecret", + "registrationExpiresAt", + ) + missing_keys = [k for k in keys if k not in token] + if missing_keys: + msg = f"Unable to refresh SSO token: missing keys: {missing_keys}" + logger.info(msg) + return None + + expiry = dateutil.parser.parse(token["registrationExpiresAt"]) + if total_seconds(expiry - self._now()) <= 0: + logger.info(f"SSO token registration expired at {expiry}") + return None + + try: + return await self._attempt_create_token(token) + except ClientError: + logger.warning("SSO token refresh attempt failed", exc_info=True) + return None + + async def _refresher(self): + start_url = self._sso_config["sso_start_url"] + session_name = self._sso_config["session_name"] + logger.info(f"Loading cached SSO token for {session_name}") + token_dict = self._token_loader(start_url, session_name=session_name) + expiration = dateutil.parser.parse(token_dict["expiresAt"]) + logger.debug(f"Cached SSO token expires at {expiration}") + + remaining = total_seconds(expiration - self._now()) + if remaining < self._REFRESH_WINDOW: + new_token_dict = await self._refresh_access_token(token_dict) + if new_token_dict is not None: + token_dict = new_token_dict + expiration = token_dict["expiresAt"] + self._token_loader.save_token( + start_url, token_dict, session_name=session_name + ) + + return FrozenAuthToken( + token_dict["accessToken"], expiration=expiration + ) + + def load_token(self): + if self._sso_config is None: + return None + + return AioDeferredRefreshableToken( + self.METHOD, self._refresher, time_fetcher=self._now + ) diff --git a/aiobotocore/utils.py b/aiobotocore/utils.py index c5959523..0eea8ff7 100644 --- a/aiobotocore/utils.py +++ b/aiobotocore/utils.py @@ -12,6 +12,7 @@ DEFAULT_METADATA_SERVICE_TIMEOUT, METADATA_BASE_URL, RETRYABLE_HTTP_ERRORS, + ArnParser, BadIMDSRequestError, ClientError, ContainerMetadataFetcher, @@ -22,6 +23,7 @@ InstanceMetadataRegionFetcher, ReadTimeoutError, S3RegionRedirector, + S3RegionRedirectorv2, get_environ_proxies, os, resolve_imds_endpoint_mode, @@ -339,6 +341,154 @@ async def _get_region(self): return region +class AioS3RegionRedirectorv2(S3RegionRedirectorv2): + async def redirect_from_error( + self, + request_dict, + response, + operation, + **kwargs, + ): + """ + An S3 request sent to the wrong region will return an error that + contains the endpoint the request should be sent to. This handler + will add the redirect information to the signing context and then + redirect the request. + """ + if response is None: + # This could be none if there was a ConnectionError or other + # transport error. + return + + redirect_ctx = request_dict.get('context', {}).get('s3_redirect', {}) + if ArnParser.is_arn(redirect_ctx.get('bucket')): + logger.debug( + 'S3 request was previously for an Accesspoint ARN, not ' + 'redirecting.' + ) + return + + if redirect_ctx.get('redirected'): + logger.debug( + 'S3 request was previously redirected, not redirecting.' 
+ ) + return + + error = response[1].get('Error', {}) + error_code = error.get('Code') + response_metadata = response[1].get('ResponseMetadata', {}) + + # We have to account for 400 responses because + # if we sign a Head* request with the wrong region, + # we'll get a 400 Bad Request but we won't get a + # body saying it's an "AuthorizationHeaderMalformed". + is_special_head_object = ( + error_code in ('301', '400') and operation.name == 'HeadObject' + ) + is_special_head_bucket = ( + error_code in ('301', '400') + and operation.name == 'HeadBucket' + and 'x-amz-bucket-region' + in response_metadata.get('HTTPHeaders', {}) + ) + is_wrong_signing_region = ( + error_code == 'AuthorizationHeaderMalformed' and 'Region' in error + ) + is_redirect_status = response[0] is not None and response[ + 0 + ].status_code in (301, 302, 307) + is_permanent_redirect = error_code == 'PermanentRedirect' + if not any( + [ + is_special_head_object, + is_wrong_signing_region, + is_permanent_redirect, + is_special_head_bucket, + is_redirect_status, + ] + ): + return + + bucket = request_dict['context']['s3_redirect']['bucket'] + client_region = request_dict['context'].get('client_region') + new_region = await self.get_bucket_region(bucket, response) + + if new_region is None: + logger.debug( + "S3 client configured for region %s but the bucket %s is not " + "in that region and the proper region could not be " + "automatically determined." % (client_region, bucket) + ) + return + + logger.debug( + "S3 client configured for region %s but the bucket %s is in region" + " %s; Please configure the proper region to avoid multiple " + "unnecessary redirects and signing attempts." + % (client_region, bucket, new_region) + ) + # Adding the new region to _cache will make construct_endpoint() to + # use the new region as value for the AWS::Region builtin parameter. + self._cache[bucket] = new_region + + # Re-resolve endpoint with new region and modify request_dict with + # the new URL, auth scheme, and signing context. + ep_resolver = self._client._ruleset_resolver + ep_info = await ep_resolver.construct_endpoint( + operation_model=operation, + call_args=request_dict['context']['s3_redirect']['params'], + request_context=request_dict['context'], + ) + request_dict['url'] = self.set_request_url( + request_dict['url'], ep_info.url + ) + request_dict['context']['s3_redirect']['redirected'] = True + auth_schemes = ep_info.properties.get('authSchemes') + if auth_schemes is not None: + auth_info = ep_resolver.auth_schemes_to_signing_ctx(auth_schemes) + auth_type, signing_context = auth_info + request_dict['context']['auth_type'] = auth_type + request_dict['context']['signing'] = { + **request_dict['context'].get('signing', {}), + **signing_context, + } + + # Return 0 so it doesn't wait to retry + return 0 + + async def get_bucket_region(self, bucket, response): + """ + There are multiple potential sources for the new region to redirect to, + but they aren't all universally available for use. This will try to + find region from response elements, but will fall back to calling + HEAD on the bucket if all else fails. + :param bucket: The bucket to find the region for. This is necessary if + the region is not available in the error response. + :param response: A response representing a service request that failed + due to incorrect region configuration. + """ + # First try to source the region from the headers. 
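As a reminder of what this class preserves for callers: the redirect stays transparent, the request is simply retried once against the region S3 reports. A hedged sketch; the bucket name and regions are invented:

    import asyncio

    from aiobotocore.session import get_session

    async def main():
        session = get_session()
        # Client deliberately pinned to a region the bucket does not live in.
        async with session.create_client('s3', region_name='us-east-1') as client:
            # AioS3RegionRedirectorv2 finds the real region via the
            # x-amz-bucket-region header, the error body, or a HEAD request,
            # re-resolves the endpoint through the ruleset and retries.
            await client.head_object(Bucket='bucket-in-eu-west-1', Key='example-key')

    asyncio.run(main())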
+ service_response = response[1] + response_headers = service_response['ResponseMetadata']['HTTPHeaders'] + if 'x-amz-bucket-region' in response_headers: + return response_headers['x-amz-bucket-region'] + + # Next, check the error body + region = service_response.get('Error', {}).get('Region', None) + if region is not None: + return region + + # Finally, HEAD the bucket. No other choice sadly. + try: + response = await self._client.head_bucket(Bucket=bucket) + headers = response['ResponseMetadata']['HTTPHeaders'] + except ClientError as e: + headers = e.response['ResponseMetadata']['HTTPHeaders'] + + region = headers.get('x-amz-bucket-region', None) + return region + + class AioS3RegionRedirector(S3RegionRedirector): async def redirect_from_error( self, request_dict, response, operation, **kwargs diff --git a/setup.py b/setup.py index 48abcb96..9e7973c5 100644 --- a/setup.py +++ b/setup.py @@ -7,15 +7,15 @@ # NOTE: When updating botocore make sure to update awscli/boto3 versions below install_requires = [ # pegged to also match items in `extras_require` - 'botocore>=1.27.59,<1.27.60', + 'botocore>=1.29.76,<1.29.77', 'aiohttp>=3.3.1', 'wrapt>=1.10.10', 'aioitertools>=0.5.1', ] extras_require = { - 'awscli': ['awscli>=1.25.60,<1.25.61'], - 'boto3': ['boto3>=1.24.59,<1.24.60'], + 'awscli': ['awscli>=1.27.76,<1.27.77'], + 'boto3': ['boto3>=1.26.76,<1.26.77'], } diff --git a/tests/boto_tests/test_credentials.py b/tests/boto_tests/test_credentials.py index 231b6a9b..9943c3a8 100644 --- a/tests/boto_tests/test_credentials.py +++ b/tests/boto_tests/test_credentials.py @@ -917,7 +917,7 @@ async def ssl_credential_fetcher_setup(): self.now_timestamp = 1222172800000 self.loader = mock.Mock(spec=SSOTokenLoader) - self.loader.return_value = self.access_token + self.loader.return_value = {'accessToken': self.access_token} self.fetcher = AioSSOCredentialFetcher( self.start_url, self.sso_region, diff --git a/tests/python3.8/boto_tests/test_signers.py b/tests/python3.8/boto_tests/test_signers.py index bac27d09..fa006c00 100644 --- a/tests/python3.8/boto_tests/test_signers.py +++ b/tests/python3.8/boto_tests/test_signers.py @@ -88,12 +88,13 @@ async def test_signers_generate_presigned_urls(): ref_request_dict = { 'body': b'', - 'url': 'https://s3.amazonaws.com/mybucket/mykey', + 'url': 'https://mybucket.s3.amazonaws.com/mykey', 'headers': {}, 'query_string': {}, - 'url_path': '/mybucket/mykey', + 'url_path': '/mykey', 'method': 'HEAD', 'context': mock.ANY, + 'auth_path': '/mybucket/mykey', } cls_gen_presigned_url_mock.assert_called_with( diff --git a/tests/python3.8/boto_tests/test_tokens.py b/tests/python3.8/boto_tests/test_tokens.py new file mode 100644 index 00000000..8b43ad45 --- /dev/null +++ b/tests/python3.8/boto_tests/test_tokens.py @@ -0,0 +1,356 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
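Before the imports of this new test module, a quick orientation on what aiobotocore/tokens.py provides: create_token_resolver builds a provider chain whose bearer tokens refresh lazily. A hedged sketch, assuming the active profile is configured with an sso_session:

    import asyncio

    from aiobotocore.session import get_session
    from aiobotocore.tokens import create_token_resolver

    async def main():
        session = get_session()
        chain = create_token_resolver(session)
        deferred = chain.load_token()  # AioDeferredRefreshableToken, or None
        if deferred is not None:
            # Refreshes through SSO OIDC when inside the refresh window.
            frozen = await deferred.get_frozen_token()
            print(frozen.token, frozen.expiration)
        # session.get_auth_token() goes through the same chain during
        # client creation (see the aiobotocore/session.py hunk above).

    asyncio.run(main())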
+from unittest import mock + +import dateutil.parser +import pytest +from botocore.exceptions import ( + InvalidConfigError, + SSOTokenLoadError, + TokenRetrievalError, +) + +from aiobotocore.session import AioSession +from aiobotocore.tokens import AioSSOTokenProvider + + +def parametrize(cases): + return pytest.mark.parametrize( + "test_case", + cases, + ids=[c["documentation"] for c in cases], + ) + + +sso_provider_resolution_cases = [ + { + "documentation": "Full valid profile", + "config": { + "profiles": {"test": {"sso_session": "admin"}}, + "sso_sessions": { + "admin": { + "sso_region": "us-east-1", + "sso_start_url": "https://d-abc123.awsapps.com/start", + } + }, + }, + "resolves": True, + }, + { + "documentation": "Non-SSO profiles are skipped", + "config": {"profiles": {"test": {"region": "us-west-2"}}}, + "resolves": False, + }, + { + "documentation": "Only start URL is invalid", + "config": { + "profiles": {"test": {"sso_session": "admin"}}, + "sso_sessions": { + "admin": { + "sso_start_url": "https://d-abc123.awsapps.com/start" + } + }, + }, + "resolves": False, + "expectedException": InvalidConfigError, + }, + { + "documentation": "Only sso_region is invalid", + "config": { + "profiles": {"test": {"sso_session": "admin"}}, + "sso_sessions": {"admin": {"sso_region": "us-east-1"}}, + }, + "resolves": False, + "expectedException": InvalidConfigError, + }, + { + "documentation": "Specified sso-session must exist", + "config": { + "profiles": {"test": {"sso_session": "dev"}}, + "sso_sessions": {"admin": {"sso_region": "us-east-1"}}, + }, + "resolves": False, + "expectedException": InvalidConfigError, + }, + { + "documentation": "The sso_session must be specified", + "config": { + "profiles": {"test": {"region": "us-west-2"}}, + "sso_sessions": { + "admin": { + "sso_region": "us-east-1", + "sso_start_url": "https://d-abc123.awsapps.com/start", + } + }, + }, + "resolves": False, + }, +] + + +def _create_mock_session(config): + mock_session = mock.Mock(spec=AioSession) + mock_session.get_config_variable.return_value = "test" + mock_session.full_config = config + return mock_session + + +def _run_token_provider_test_case(provider, test_case): + expected_exception = test_case.get("expectedException") + if expected_exception is not None: + with pytest.raises(expected_exception): + auth_token = provider.load_token() + return + + auth_token = provider.load_token() + if test_case["resolves"]: + assert auth_token is not None + else: + assert auth_token is None + + +@pytest.mark.moto +@parametrize(sso_provider_resolution_cases) +def test_sso_token_provider_resolution(test_case): + mock_session = _create_mock_session(test_case["config"]) + resolver = AioSSOTokenProvider(mock_session) + + _run_token_provider_test_case(resolver, test_case) + + +@pytest.mark.moto +@parametrize(sso_provider_resolution_cases) +def test_sso_token_provider_profile_name_overrides_session_profile(test_case): + mock_session = _create_mock_session(test_case["config"]) + mock_session.get_config_variable.return_value = "default" + resolver = AioSSOTokenProvider(mock_session, profile_name='test') + + _run_token_provider_test_case(resolver, test_case) + + +sso_provider_refresh_cases = [ + { + "documentation": "Valid token with all fields", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": { + "startUrl": "https://d-123.awsapps.com/start", + "region": "us-west-2", + "accessToken": "cachedtoken", + "expiresAt": "2021-12-25T21:30:00Z", + "clientId": "clientid", + "clientSecret": "YSBzZWNyZXQ=", + 
"registrationExpiresAt": "2022-12-25T13:30:00Z", + "refreshToken": "cachedrefreshtoken", + }, + "expectedToken": { + "token": "cachedtoken", + "expiration": "2021-12-25T21:30:00Z", + }, + }, + { + "documentation": "Minimal valid cached token", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": { + "accessToken": "cachedtoken", + "expiresAt": "2021-12-25T21:30:00Z", + }, + "expectedToken": { + "token": "cachedtoken", + "expiration": "2021-12-25T21:30:00Z", + }, + }, + { + "documentation": "Minimal expired cached token", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": { + "accessToken": "cachedtoken", + "expiresAt": "2021-12-25T13:00:00Z", + }, + "expectedException": TokenRetrievalError, + }, + { + "documentation": "Token missing the expiresAt field", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": {"accessToken": "cachedtoken"}, + "expectedException": SSOTokenLoadError, + }, + { + "documentation": "Token missing the accessToken field", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": {"expiresAt": "2021-12-25T13:00:00Z"}, + "expectedException": SSOTokenLoadError, + }, + { + "documentation": "Expired token refresh with refresh token", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": { + "startUrl": "https://d-123.awsapps.com/start", + "region": "us-west-2", + "accessToken": "cachedtoken", + "expiresAt": "2021-12-25T13:00:00Z", + "clientId": "clientid", + "clientSecret": "YSBzZWNyZXQ=", + "registrationExpiresAt": "2022-12-25T13:30:00Z", + "refreshToken": "cachedrefreshtoken", + }, + "refreshResponse": { + "tokenType": "Bearer", + "accessToken": "newtoken", + "expiresIn": 28800, + "refreshToken": "newrefreshtoken", + }, + "expectedTokenWriteback": { + "startUrl": "https://d-123.awsapps.com/start", + "region": "us-west-2", + "accessToken": "newtoken", + "expiresAt": "2021-12-25T21:30:00Z", + "clientId": "clientid", + "clientSecret": "YSBzZWNyZXQ=", + "registrationExpiresAt": "2022-12-25T13:30:00Z", + "refreshToken": "newrefreshtoken", + }, + "expectedToken": { + "token": "newtoken", + "expiration": "2021-12-25T21:30:00Z", + }, + }, + { + "documentation": "Expired token refresh without new refresh token", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": { + "startUrl": "https://d-123.awsapps.com/start", + "region": "us-west-2", + "accessToken": "cachedtoken", + "expiresAt": "2021-12-25T13:00:00Z", + "clientId": "clientid", + "clientSecret": "YSBzZWNyZXQ=", + "registrationExpiresAt": "2022-12-25T13:30:00Z", + "refreshToken": "cachedrefreshtoken", + }, + "refreshResponse": { + "tokenType": "Bearer", + "accessToken": "newtoken", + "expiresIn": 28800, + }, + "expectedTokenWriteback": { + "startUrl": "https://d-123.awsapps.com/start", + "region": "us-west-2", + "accessToken": "newtoken", + "expiresAt": "2021-12-25T21:30:00Z", + "clientId": "clientid", + "clientSecret": "YSBzZWNyZXQ=", + "registrationExpiresAt": "2022-12-25T13:30:00Z", + }, + "expectedToken": { + "token": "newtoken", + "expiration": "2021-12-25T21:30:00Z", + }, + }, + { + "documentation": "Expired token and expired client registration", + "currentTime": "2021-12-25T13:30:00Z", + "cachedToken": { + "startUrl": "https://d-123.awsapps.com/start", + "region": "us-west-2", + "accessToken": "cachedtoken", + "expiresAt": "2021-10-25T13:00:00Z", + "clientId": "clientid", + "clientSecret": "YSBzZWNyZXQ=", + "registrationExpiresAt": "2021-11-25T13:30:00Z", + "refreshToken": "cachedrefreshtoken", + }, + "expectedException": TokenRetrievalError, + }, +] + + +@pytest.mark.moto 
+@pytest.mark.asyncio +@parametrize(sso_provider_refresh_cases) +async def test_sso_token_provider_refresh(test_case): + config = { + "profiles": {"test": {"sso_session": "admin"}}, + "sso_sessions": { + "admin": { + "sso_region": "us-west-2", + "sso_start_url": "https://d-123.awsapps.com/start", + } + }, + } + cache_key = "d033e22ae348aeb5660fc2140aec35850c4da997" + token_cache = {} + + # Prepopulate the token cache + cached_token = test_case.pop("cachedToken", None) + if cached_token: + token_cache[cache_key] = cached_token + + mock_session = _create_mock_session(config) + mock_sso_oidc = mock.Mock() + mock_session.create_client.return_value = mock_sso_oidc + + refresh_response = test_case.pop("refreshResponse", None) + mock_sso_oidc.create_token = mock.AsyncMock(return_value=refresh_response) + + current_time = dateutil.parser.parse(test_case.pop("currentTime")) + + def _time_fetcher(): + return current_time + + resolver = AioSSOTokenProvider( + mock_session, + token_cache, + time_fetcher=_time_fetcher, + ) + + auth_token = resolver.load_token() + + actual_exception = None + try: + actual_token = await auth_token.get_frozen_token() + except Exception as e: + actual_exception = e + + expected_exception = test_case.pop("expectedException", None) + if expected_exception is not None: + assert isinstance(actual_exception, expected_exception) + elif actual_exception is not None: + raise actual_exception + + expected_token = test_case.pop("expectedToken", {}) + raw_token = expected_token.get("token") + if raw_token is not None: + assert actual_token.token == raw_token + + raw_expiration = expected_token.get("expiration") + if raw_expiration is not None: + expected_expiration = dateutil.parser.parse(raw_expiration) + assert actual_token.expiration == expected_expiration + + expected_token_write_back = test_case.pop("expectedTokenWriteback", None) + if expected_token_write_back: + mock_sso_oidc.create_token.assert_called_with( + grantType="refresh_token", + clientId=cached_token["clientId"], + clientSecret=cached_token["clientSecret"], + refreshToken=cached_token["refreshToken"], + ) + raw_expiration = expected_token_write_back["expiresAt"] + # The in-memory cache doesn't serialize to JSON so expect a datetime + expected_expiration = dateutil.parser.parse(raw_expiration) + expected_token_write_back["expiresAt"] = expected_expiration + assert expected_token_write_back == token_cache[cache_key] + + # Pop the documentation to ensure all test fields are handled + test_case.pop("documentation") + assert not test_case.keys(), "All fields of test case should be handled" diff --git a/tests/test_mturk.py b/tests/test_mturk.py index cb7b1d36..865d0854 100644 --- a/tests/test_mturk.py +++ b/tests/test_mturk.py @@ -33,7 +33,7 @@ async def test_mturk_stubber(session): {'QualificationTypeId': ANY}, ) - response = await client.list_hi_ts_for_qualification_type( + response = await client.list_hits_for_qualification_type( QualificationTypeId='string' ) assert response == _mturk_list_hits_response diff --git a/tests/test_patches.py b/tests/test_patches.py index acd30218..5fd64657 100644 --- a/tests/test_patches.py +++ b/tests/test_patches.py @@ -72,6 +72,7 @@ RestXMLParser, create_parser, ) +from botocore.regions import EndpointRulesetResolver from botocore.response import StreamingBody, get_response from botocore.retries import adaptive, special, standard from botocore.retries.bucket import TokenBucket @@ -86,6 +87,11 @@ generate_presigned_post, generate_presigned_url, ) +from botocore.tokens import ( + 
DeferredRefreshableToken, + SSOTokenProvider, + create_token_resolver, +) from botocore.utils import ( ContainerMetadataFetcher, IMDSFetcher, @@ -93,6 +99,7 @@ InstanceMetadataFetcher, InstanceMetadataRegionFetcher, S3RegionRedirector, + S3RegionRedirectorv2, ) from botocore.waiter import ( NormalizedOperationMethod, @@ -140,10 +147,13 @@ _API_DIGESTS = { # args.py ClientArgsCreator.get_client_args: { - '5e5b18cb0b466d3acb2e0ecacbc8dc78de4022fc' + '63f26e3225338f285b98a4fe9bdcd1057a0f61b2' + }, + ClientArgsCreator._build_endpoint_resolver: { + '9aa226b8d6f09f7270633b8cc35bc82a15386ee4' }, # client.py - ClientCreator.create_client: {'3af567fcde81899a3b722d9cafd6a5c78e8ea08c'}, + ClientCreator.create_client: {'ef5bef8f4b2887143165e72554fd85c36af7e822'}, ClientCreator._create_client_class: { 'fcecaf8d4f2c1ac3c5d0eb50c573233ef86d641d' }, @@ -151,10 +161,10 @@ '483c6c8e035810d1b76110fc1956de76943c2f18' }, ClientCreator._get_client_args: { - 'cc8da937425ba78f715304a82cec346dedb6292e' + '86edebe5bc72e89e75c593e8f4bf2a3c358f1d8f' }, ClientCreator._register_s3_events: { - 'e2ada7e2fcc23f62a414a9dc806a50c0fe6c135c' + '5659a5312caeb3ea97d663d837d6d201f08824f2' }, ClientCreator._register_retries: { '16d3064142e5f9e45b0094bbfabf7be30183f255' @@ -168,18 +178,21 @@ ClientCreator._register_legacy_retries: { '000b2f2a122602e2e741ec2e89308dc2e2b67329' }, - BaseClient._make_api_call: {'ba323d78c89c292efe7fec6b74fe6c258b63d565'}, + BaseClient._make_api_call: {'bac0b84ebf6276a0c7510095ff168e6fe86a64f6'}, BaseClient._make_request: {'cfd8bbf19ea132134717cdf9c460694ddacdbf58'}, BaseClient._convert_to_request_dict: { - '2e6eb436e95822f993d70d6127ae11e20689f9c4' + '2e423ea67f8773c1a1a64bc516d5102555f4f61a' }, BaseClient._emit_api_params: {'abd67874dae8d5cd2788412e4699398cb915a119'}, + BaseClient._resolve_endpoint_ruleset: { + 'e8e7fe581a2e4ff1a75d1ee923c0ed2c6a0d9c9d' + }, BaseClient.get_paginator: {'3531d5988aaaf0fbb3885044ccee1a693ec2608b'}, BaseClient.get_waiter: {'44f0473d993d49ac7502984a7ccee3240b088404'}, BaseClient.__getattr__: {'3ec17f468f50789fa633d6041f40b66a2f593e77'}, # config.py Config.merge: {'c3dd8c3ffe0da86953ceba4a35267dfb79c6a2c8'}, - Config: {'df1410e13b577bd3c1affa83309c206478907316'}, + Config: {'90f26fb2f264c0424d60f035494402eac69de269'}, # credentials.py create_mfa_serial_refresher: {'9b5e98782fcacdcea5899a6d0d29d1b9de348bb0'}, Credentials.get_frozen_credentials: { @@ -216,8 +229,8 @@ RefreshableCredentials.get_frozen_credentials: { 'f661c84a8b759786e011f0b1e8a468a0c6294e36' }, - SSOCredentialFetcher: {'ac0cc1f456392f844c7182af635ee87e9fe9cf02'}, - SSOProvider.load: {'f43d79e1520b2a7b7ef85cd537f41e19d4bce806'}, + SSOCredentialFetcher: {'fa2a1dd73e0ec37e250c97f55a7b2c341a7f836a'}, + SSOProvider.load: {'67aba81dd1def437f2035f5e20b0720b328d970a'}, CachedCredentialFetcher._get_credentials: { '02a7d13599d972e3f258d2b53f87eeda4cc3e3a4' }, @@ -254,7 +267,7 @@ '935ae28fdb1c76f419523d4030265f8c4d9d0b00' }, InstanceMetadataProvider.load: { - '2016921ddb86cab5ec699ab217e847a300745990' + '15becfc0373ccfbc1bb200bd6a34731e61561d06' }, ProfileProviderBuilder._create_process_provider: { 'c5eea47bcfc449a6d73a9892bd0e1897f6be0c20' @@ -269,7 +282,7 @@ '478745fa6779a7c69fe9441d89d3e921438e3a59' }, ProfileProviderBuilder._create_sso_provider: { - '258e6d07bdf40ea2c7551bae0cd6e1ab58e4e502' + 'e463160179add7a1a513e46ee848447a216504aa' }, ConfigProvider.load: {'d0714da9f1f54cebc555df82f181c4913ce97258'}, SharedCredentialProvider.load: { @@ -385,20 +398,33 @@ '3cf7bb1ecff0d72bafd7e7fd6625595b4060abd6' }, # NOTE: if 
this hits we need to change our ResponseParser impl in JSONParser - JSONParser.parse: {'38231a2fffddfa6e91c56c2a01134459e365beb3'}, + JSONParser.parse: {'c2153eac3789855f4fc6a816a1f30a6afe0cf969'}, RestJSONParser._create_event_stream: { '0564ba55383a71cc1ba3e5be7110549d7e9992f5' }, create_parser: {'37e9f1c3b60de17f477a9b79eae8e1acaa7c89d7'}, + # regions.py + EndpointRulesetResolver.construct_endpoint: { + 'ccbed61e316a0e92e1d0f67c554ee15efa4ee6b8' + }, + EndpointRulesetResolver._get_provider_params: { + 'e17f8fce4a5d8adba932cb85e588f369845ce534' + }, + EndpointRulesetResolver._get_customized_builtins: { + '41085e0e1ac19915c24339f25b8d966708905fd0' + }, # response.py StreamingBody: {'73cb1276dfb509331b964d3d5ed69e5efa008de5'}, get_response: {'6515f43730b546419695c26d4bc0d198fde54b10'}, # session.py - Session.__init__: {'d0d3b11d6feb4783d2a7399246ce02c58e2c34e7'}, + Session.__init__: {'c796153d589ea6fe46a3a1afa2c460f06a1c37a2'}, Session._register_response_parser_factory: { 'bb8f7f3cc4d9ff9551f0875604747c4bb5030ff6' }, - Session.create_client: {'a179ef6d370020181d99fcb6cb1279e948d72afa'}, + Session.create_client: {'8b1bd136aba5d0e519816aca7354b3d1e2dee7ec'}, + Session._create_token_resolver: { + '142df7a219db0dd9c96fd81dc9e84a764a2fe5fb' + }, Session._create_credential_resolver: { '87e98d201c72d06f7fbdb4ebee2dce1c09de0fb2' }, @@ -417,21 +443,43 @@ RequestSigner.sign: {'d90346d5e066e89cd902c5c936f59b644ecde275'}, RequestSigner.get_auth: {'4f8099bef30f9a72fa3bcaa1bd3d22c4fbd224a8'}, RequestSigner.get_auth_instance: { - 'c2c34a0f44cac8819c7e9b74ca52dc82a28a1a08' + '4f9be5feafd6c08ffd7bb8de3c9bc36bc02cbfc8' }, - RequestSigner._choose_signer: {'eb82bd279d8c6cb7c93f7330a45544f0dda73170'}, + RequestSigner._choose_signer: {'bd0e9784029b8aa182b5aec73910d94cb67c36b0'}, RequestSigner.generate_presigned_url: { '417682868eacc10bf4c65f3dfbdba7d20d9250db' }, add_generate_presigned_url: {'5820f74ac46b004eb79e00eea1adc467bcf4defe'}, - generate_presigned_url: {'03cb4e442690f2df47f1580c66345b12764dee0e'}, + generate_presigned_url: {'4bbb8eea8ebdd3d49a3c9739a990eb219ed12cc4'}, S3PostPresigner.generate_presigned_post: { '269efc9af054a2fd2728d5b0a27db82c48053d7f' }, add_generate_presigned_post: {'e30360f2bd893fabf47f5cdb04b0de420ccd414d'}, - generate_presigned_post: {'56687f0abdf1451951cffabefa9d970256fca420'}, + generate_presigned_post: {'1b48275e09e9c1f872a1d16e74d7e40f34cfaf90'}, add_generate_db_auth_token: {'f61014e6fac4b5c7ee7ac2d2bec15fb16fa9fbe5'}, generate_db_auth_token: {'1f37e1e5982d8528841ce6b79f229b3e23a18959'}, + # tokens.py + create_token_resolver: {'b287f4879235a4292592a49b201d2b0bc2dbf401'}, + DeferredRefreshableToken.__init__: { + '199254ed7e211119bdebf285c5d9a9789f6dc540' + }, + DeferredRefreshableToken.get_frozen_token: { + '846a689a25550c63d2a460555dc27148abdcc992' + }, + DeferredRefreshableToken._refresh: { + '92af1e549b5719caa246a81493823a37a684d017' + }, + DeferredRefreshableToken._protected_refresh: { + 'bd5c1911626e420005e0e60d583a73c68925f4b6' + }, + SSOTokenProvider._attempt_create_token: { + '9cf7b75618a253d585819485e5da641cef129d46' + }, + SSOTokenProvider._refresh_access_token: { + 'cb179d1f262e41cc03a7c218e624e8c7fbeeaf19' + }, + SSOTokenProvider._refresher: {'824d41775dbb8a05184f6e9c7b2ea7202b72f2a9'}, + SSOTokenProvider.load_token: {'aea8584ef3fb83948ed82f2a2518eec40fb537a0'}, # utils.py ContainerMetadataFetcher.__init__: { '46d90a7249ba8389feb487779b0a02e6faa98e57' @@ -479,6 +527,12 @@ InstanceMetadataFetcher._needs_retry_for_credentials: { 
'e7e5a8ce541110eb79bf98414171d3a1c137e32b' }, + S3RegionRedirectorv2.redirect_from_error: { + 'ac37ca2ca48f7bde42d9659c01d5bd5bc08a78f9' + }, + S3RegionRedirectorv2.get_bucket_region: { + 'b5bbc8b010576668dc2812d657c4b48af79e8f99' + }, S3RegionRedirector.redirect_from_error: { '3863b2c6472513b7896bfccc9dfd2567c472f441' }, @@ -569,7 +623,7 @@ AwsChunkedWrapper.__iter__: {'261e26d1061655555fe3dcb2689d963e43f80fb0'}, apply_request_checksum: {'bcc044f0655f30769994efab72b29e76d73f7e39'}, _apply_request_trailer_checksum: { - '55c36eaf4701a379fcdbd78d0b7a831e5023a76e' + '28cdf19282be7cd2c99a734831ec4f489648bcc7' }, # retryhandler.py retryhandler.create_retry_handler: {
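A closing note on the test_patches.py digests above: each pinned SHA-1 is the hash of the upstream botocore source for something aiobotocore overrides, so a botocore upgrade that touches one of them fails the test until the corresponding override is reviewed. The idea, sketched with inspect rather than the test's own helpers:

    import hashlib
    import inspect

    from botocore.client import BaseClient

    def source_digest(obj):
        """SHA-1 of the source of a patched botocore attribute."""
        return hashlib.sha1(inspect.getsource(obj).encode('utf-8')).hexdigest()

    # With botocore 1.29.76 installed this is expected to match the value
    # pinned above for BaseClient._make_api_call.
    assert source_digest(BaseClient._make_api_call) == (
        'bac0b84ebf6276a0c7510095ff168e6fe86a64f6'
    )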