Support up-to-date botocore (aio-libs#1063)
* bump botocore dependency specification
jakob-keller authored Dec 13, 2023
1 parent 1ce6853 commit 110b0a2
Showing 8 changed files with 177 additions and 34 deletions.
22 changes: 11 additions & 11 deletions .pre-commit-config.yaml
@@ -1,25 +1,25 @@
 repos:
 - repo: 'https://github.com/pre-commit/pre-commit-hooks'
-  rev: v4.4.0
+  rev: v4.5.0
   hooks:
   - id: check-yaml
   - id: end-of-file-fixer
   - id: trailing-whitespace
-- repo: 'https://github.com/PyCQA/isort'
-  rev: 5.12.0
-  hooks:
-  - id: isort
-- repo: 'https://github.com/pycqa/flake8'
-  rev: 6.0.0
-  hooks:
-  - id: flake8
 - repo: 'https://github.com/asottile/pyupgrade'
-  rev: v3.3.2
+  rev: v3.15.0
   hooks:
   - id: pyupgrade
     args:
     - '--py36-plus'
+- repo: 'https://github.com/PyCQA/isort'
+  rev: 5.12.0
+  hooks:
+  - id: isort
 - repo: 'https://github.com/psf/black'
-  rev: 23.3.0
+  rev: 23.11.0
   hooks:
   - id: black
+- repo: 'https://github.com/pycqa/flake8'
+  rev: 6.1.0
+  hooks:
+  - id: flake8
4 changes: 4 additions & 0 deletions CHANGES.rst
@@ -1,6 +1,10 @@
 Changes
 -------

+2.9.0 (2023-12-12)
+^^^^^^^^^^^^^^^^^^
+* bump botocore dependency specification
+
 2.8.0 (2023-11-28)
 ^^^^^^^^^^^^^^^^^^
 * add AioStubber that returns AioAWSResponse()
2 changes: 1 addition & 1 deletion aiobotocore/__init__.py
@@ -1 +1 @@
-__version__ = '2.8.0'
+__version__ = '2.9.0'
34 changes: 29 additions & 5 deletions aiobotocore/client.py
@@ -17,11 +17,12 @@

 from . import waiter
 from .args import AioClientArgsCreator
+from .credentials import AioRefreshableCredentials
 from .discovery import AioEndpointDiscoveryHandler, AioEndpointDiscoveryManager
 from .httpchecksum import apply_request_checksum
 from .paginate import AioPaginator
 from .retries import adaptive, standard
-from .utils import AioS3RegionRedirectorv2
+from .utils import AioS3ExpressIdentityResolver, AioS3RegionRedirectorv2

 history_recorder = get_global_history_recorder()

@@ -96,6 +97,7 @@ async def create_client(
             client_config=client_config,
             scoped_config=scoped_config,
         )
+        self._register_s3express_events(client=service_client)
         self._register_s3_control_events(client=service_client)
         self._register_endpoint_discovery(
             service_client, endpoint_url, client_config
@@ -223,6 +225,20 @@ def _register_endpoint_discovery(self, client, endpoint_url, config):
                 block_endpoint_discovery_required_operations,
             )

+    def _register_s3express_events(
+        self,
+        client,
+        endpoint_bridge=None,
+        endpoint_url=None,
+        client_config=None,
+        scoped_config=None,
+    ):
+        if client.meta.service_model.service_name != 's3':
+            return
+        AioS3ExpressIdentityResolver(
+            client, AioRefreshableCredentials
+        ).register()
+
     def _register_s3_events(
         self,
         client,
@@ -331,11 +347,17 @@ async def _make_api_call(self, operation_name, api_params):
             operation_model=operation_model,
             context=request_context,
         )
-        # fmt: off
-        endpoint_url, additional_headers = await self._resolve_endpoint_ruleset(
+        (
+            endpoint_url,
+            additional_headers,
+            properties,
+        ) = await self._resolve_endpoint_ruleset(
             operation_model, api_params, request_context
         )
-        # fmt: on
+        if properties:
+            # Pass arbitrary endpoint info with the Request
+            # for use during construction.
+            request_context['endpoint_properties'] = properties
         request_dict = await self._convert_to_request_dict(
             api_params=api_params,
             operation_model=operation_model,
@@ -482,6 +504,7 @@ async def _resolve_endpoint_ruleset(
         if self._ruleset_resolver is None:
             endpoint_url = self.meta.endpoint_url
             additional_headers = {}
+            endpoint_properties = {}
         else:
             endpoint_info = await self._ruleset_resolver.construct_endpoint(
                 operation_model=operation_model,
@@ -490,6 +513,7 @@
             )
             endpoint_url = endpoint_info.url
             additional_headers = endpoint_info.headers
+            endpoint_properties = endpoint_info.properties
             # If authSchemes is present, overwrite default auth type and
             # signing context derived from service model.
             auth_schemes = endpoint_info.properties.get('authSchemes')
@@ -506,7 +530,7 @@
                 else:
                     request_context['signing'] = signing_context

-        return endpoint_url, additional_headers
+        return endpoint_url, additional_headers, endpoint_properties

     def get_paginator(self, operation_name):
         """Create a paginator for an operation.
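For orientation, the changes above stay inside `_make_api_call()` and `_resolve_endpoint_ruleset()`; the client-facing call pattern is the same as before. A minimal usage sketch (assumes default AWS credentials are configured; the region is arbitrary):

    import asyncio

    from aiobotocore.session import get_session


    async def main():
        session = get_session()
        # create_client() returns an async context manager; every operation,
        # including the endpoint ruleset resolution shown above, is awaited
        async with session.create_client('s3', region_name='us-east-1') as client:
            response = await client.list_buckets()
            print([bucket['Name'] for bucket in response['Buckets']])


    asyncio.run(main())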
31 changes: 25 additions & 6 deletions aiobotocore/signers.py
@@ -69,6 +69,12 @@ async def sign(
                 kwargs['region_name'] = signing_context['region']
             if signing_context.get('signing_name'):
                 kwargs['signing_name'] = signing_context['signing_name']
+            if signing_context.get('identity_cache') is not None:
+                self._resolve_identity_cache(
+                    kwargs,
+                    signing_context['identity_cache'],
+                    signing_context['cache_key'],
+                )
             try:
                 auth = await self.get_auth_instance(**kwargs)
             except UnknownSignatureVersionError as e:
@@ -141,11 +147,16 @@ async def get_auth_instance(
             auth = cls(frozen_token)
             return auth

+        credentials = self._credentials
+        if getattr(cls, "REQUIRES_IDENTITY_CACHE", None) is True:
+            cache = kwargs["identity_cache"]
+            key = kwargs["cache_key"]
+            credentials = await cache.get_credentials(key)
+            del kwargs["cache_key"]
+
         frozen_credentials = None
-        if self._credentials is not None:
-            frozen_credentials = (
-                await self._credentials.get_frozen_credentials()
-            )
+        if credentials is not None:
+            frozen_credentials = await credentials.get_frozen_credentials()
         kwargs['credentials'] = frozen_credentials
         if cls.REQUIRES_REGION:
             if self._region_name is None:
@@ -331,7 +342,11 @@ async def generate_presigned_url(
         context=context,
     )
     bucket_is_arn = ArnParser.is_arn(params.get('Bucket', ''))
-    endpoint_url, additional_headers = await self._resolve_endpoint_ruleset(
+    (
+        endpoint_url,
+        additional_headers,
+        properties,
+    ) = await self._resolve_endpoint_ruleset(
         operation_model,
         params,
         context,
@@ -396,7 +411,11 @@ async def generate_presigned_post(
         context=context,
     )
     bucket_is_arn = ArnParser.is_arn(params.get('Bucket', ''))
-    endpoint_url, additional_headers = await self._resolve_endpoint_ruleset(
+    (
+        endpoint_url,
+        additional_headers,
+        properties,
+    ) = await self._resolve_endpoint_ruleset(
         operation_model,
         params,
         context,
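The presigning helpers only gain the extra `properties` element when unpacking `_resolve_endpoint_ruleset()`; their signatures are unchanged. A sketch of presigning with the async client (bucket and key names are placeholders):

    import asyncio

    from aiobotocore.session import get_session


    async def main():
        session = get_session()
        async with session.create_client('s3', region_name='us-east-1') as client:
            # generate_presigned_url() is a coroutine in aiobotocore,
            # unlike its synchronous boto3 counterpart
            url = await client.generate_presigned_url(
                'get_object',
                Params={'Bucket': 'example-bucket', 'Key': 'example-key'},
                ExpiresIn=3600,
            )
            print(url)


    asyncio.run(main())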
70 changes: 70 additions & 0 deletions aiobotocore/utils.py
@@ -1,4 +1,5 @@
 import asyncio
+import functools
 import inspect
 import json
 import logging
@@ -17,11 +18,14 @@
     ClientError,
     ContainerMetadataFetcher,
     HTTPClientError,
+    IdentityCache,
     IMDSFetcher,
     IMDSRegionProvider,
     InstanceMetadataFetcher,
     InstanceMetadataRegionFetcher,
     ReadTimeoutError,
+    S3ExpressIdentityCache,
+    S3ExpressIdentityResolver,
     S3RegionRedirector,
     S3RegionRedirectorv2,
     get_environ_proxies,
@@ -348,6 +352,72 @@ async def _get_region(self):
         return region


+class AioIdentityCache(IdentityCache):
+    async def get_credentials(self, **kwargs):
+        callback = self.build_refresh_callback(**kwargs)
+        metadata = await callback()
+        credential_entry = self._credential_cls.create_from_metadata(
+            metadata=metadata,
+            refresh_using=callback,
+            method=self.METHOD,
+            advisory_timeout=45,
+            mandatory_timeout=10,
+        )
+        return credential_entry
+
+
+class AioS3ExpressIdentityCache(AioIdentityCache, S3ExpressIdentityCache):
+    @functools.cached_property
+    def _aio_credential_cache(self):
+        """Substitutes upstream credential cache."""
+        return {}
+
+    async def get_credentials(self, bucket):
+        # upstream uses `@functools.lru_cache(maxsize=100)` to cache credentials.
+        # This is incompatible with async code.
+        # We need to implement custom caching logic.
+
+        if (credentials := self._aio_credential_cache.get(bucket)) is None:
+            # cache miss -> get credentials asynchronously
+            credentials = await super().get_credentials(bucket=bucket)
+
+            # upstream cache is bounded at 100 entries
+            if len(self._aio_credential_cache) >= 100:
+                # drop oldest entry from cache (deviates from lru_cache logic)
+                self._aio_credential_cache.pop(
+                    next(iter(self._aio_credential_cache)),
+                )
+
+            self._aio_credential_cache[bucket] = credentials
+
+        return credentials
+
+    def build_refresh_callback(self, bucket):
+        async def refresher():
+            response = await self._client.create_session(Bucket=bucket)
+            creds = response['Credentials']
+            expiration = self._serialize_if_needed(
+                creds['Expiration'], iso=True
+            )
+            return {
+                "access_key": creds['AccessKeyId'],
+                "secret_key": creds['SecretAccessKey'],
+                "token": creds['SessionToken'],
+                "expiry_time": expiration,
+            }
+
+        return refresher
+
+
+class AioS3ExpressIdentityResolver(S3ExpressIdentityResolver):
+    def __init__(self, client, credential_cls, cache=None):
+        super().__init__(client, credential_cls, cache)
+
+        if cache is None:
+            cache = AioS3ExpressIdentityCache(self._client, credential_cls)
+        self._cache = cache
+
+
 class AioS3RegionRedirectorv2(S3RegionRedirectorv2):
     async def redirect_from_error(
         self,
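To illustrate the comment in `AioS3ExpressIdentityCache.get_credentials()`: `functools.lru_cache` caches the coroutine object returned by the call, not the awaited result, so a cache hit hands back an already-awaited coroutine. A standalone sketch (the bucket name is just a placeholder):

    import asyncio
    import functools


    @functools.lru_cache(maxsize=100)
    async def get_credentials(bucket):
        # the cache stores the coroutine created here, not this dict
        return {'bucket': bucket}


    async def main():
        await get_credentials('example--use1-az4--x-s3')  # first await succeeds
        # the cached coroutine has already been awaited, so this raises
        # "RuntimeError: cannot reuse already awaited coroutine"
        await get_credentials('example--use1-az4--x-s3')


    asyncio.run(main())

Hence the dict-based cache above, bounded at 100 entries to mirror the upstream `maxsize=100`.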
6 changes: 3 additions & 3 deletions setup.py
@@ -7,15 +7,15 @@
 # NOTE: When updating botocore make sure to update awscli/boto3 versions below
 install_requires = [
     # pegged to also match items in `extras_require`
-    'botocore>=1.32.4,<1.33.2',
+    'botocore>=1.33.2,<1.33.14',
     'aiohttp>=3.7.4.post0,<4.0.0',
     'wrapt>=1.10.10, <2.0.0',
     'aioitertools>=0.5.1,<1.0.0',
 ]

 extras_require = {
-    'awscli': ['awscli>=1.30.4,<1.31.2'],
-    'boto3': ['boto3>=1.29.4,<1.33.2'],
+    'awscli': ['awscli>=1.31.2,<1.31.14'],
+    'boto3': ['boto3>=1.33.2,<1.33.14'],
 }


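Because botocore, boto3 and awscli move in lock step, a mismatch normally surfaces as a pip resolver error at install time. A small sketch for checking what actually got installed against the pins above:

    from importlib.metadata import PackageNotFoundError, version

    # botocore should report a version in the >=1.33.2,<1.33.14 range;
    # boto3 and awscli are optional extras and may be absent
    for package in ('aiobotocore', 'botocore', 'boto3', 'awscli'):
        try:
            print(package, version(package))
        except PackageNotFoundError:
            print(package, 'not installed')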