diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml
index b63d0331edd..829d005734d 100644
--- a/.github/workflows/auto-merge.yml
+++ b/.github/workflows/auto-merge.yml
@@ -14,7 +14,7 @@ jobs:
steps:
- name: Dependabot metadata
id: metadata
- uses: dependabot/fetch-metadata@v1.3.3
+ uses: dependabot/fetch-metadata@v1.3.4
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Enable auto-merge for mypy-boto3 stubs Dependabot PRs
diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml
new file mode 100644
index 00000000000..850063098cd
--- /dev/null
+++ b/.github/workflows/publish_v2_layer.yml
@@ -0,0 +1,19 @@
+name: Deploy v2 layer to all regions
+
+permissions:
+ id-token: write
+ contents: read
+
+on:
+ workflow_dispatch:
+ inputs:
+ latest_published_version:
+ description: "Latest PyPI published version to deploy the layer for, e.g. v1.22.0"
+ required: true
+
+jobs:
+ dummy:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Hello world
+ run: echo "hello world"
diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml
index 29ec0afaad5..812ae7b41dd 100644
--- a/.github/workflows/python_build.yml
+++ b/.github/workflows/python_build.yml
@@ -53,7 +53,7 @@ jobs:
- name: Complexity baseline
run: make complexity-baseline
- name: Upload coverage to Codecov
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # 3.1.0
+ uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # 3.1.1
with:
file: ./coverage.xml
env_vars: PYTHON
diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml
index 7786903e218..86176968839 100644
--- a/.github/workflows/run-e2e-tests.yml
+++ b/.github/workflows/run-e2e-tests.yml
@@ -36,7 +36,7 @@ jobs:
- name: Install poetry
run: pipx install poetry
- name: "Use Python"
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.version }}
architecture: "x64"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 509f9f7d076..8cd2040d780 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,65 @@
# Unreleased
+## Bug Fixes
+
+* **apigateway:** update Response class to require status_code only ([#1560](https://github.com/awslabs/aws-lambda-powertools-python/issues/1560))
+* **ci:** integrate isort 5.0 with black to resolve conflicts
+* **event_sources:** implement Mapping protocol on DictWrapper for better interop with existing middlewares ([#1516](https://github.com/awslabs/aws-lambda-powertools-python/issues/1516))
+* **typing:** fix mypy error
+* **typing:** level arg in copy_config_to_registered_loggers ([#1534](https://github.com/awslabs/aws-lambda-powertools-python/issues/1534))
+
+## Documentation
+
+* **batch:** document the new lambda context feature
+* **homepage:** introduce POWERTOOLS_DEV env var ([#1569](https://github.com/awslabs/aws-lambda-powertools-python/issues/1569))
+* **multiple:** fix highlighting after new isort/black integration
+* **parser:** add JSON string field extension example ([#1526](https://github.com/awslabs/aws-lambda-powertools-python/issues/1526))
+
+## Features
+
+* **batch:** inject lambda_context if record handler signature accepts it ([#1561](https://github.com/awslabs/aws-lambda-powertools-python/issues/1561))
+* **event-handler:** context support to share data between routers ([#1567](https://github.com/awslabs/aws-lambda-powertools-python/issues/1567))
+* **logger:** introduce POWERTOOLS_DEBUG for internal debugging ([#1572](https://github.com/awslabs/aws-lambda-powertools-python/issues/1572))
+* **logger:** include logger name attribute when copy_config_to_registered_logger is used ([#1568](https://github.com/awslabs/aws-lambda-powertools-python/issues/1568))
+* **logger:** pretty-print JSON when POWERTOOLS_DEV is set ([#1548](https://github.com/awslabs/aws-lambda-powertools-python/issues/1548))
+
+## Maintenance
+
+* **deps:** bump codecov/codecov-action from 3.1.0 to 3.1.1 ([#1529](https://github.com/awslabs/aws-lambda-powertools-python/issues/1529))
+* **deps:** bump actions/setup-python from 3 to 4 ([#1528](https://github.com/awslabs/aws-lambda-powertools-python/issues/1528))
+* **deps:** bump email-validator from 1.2.1 to 1.3.0 ([#1533](https://github.com/awslabs/aws-lambda-powertools-python/issues/1533))
+* **deps:** bump dependabot/fetch-metadata from 1.3.3 to 1.3.4 ([#1565](https://github.com/awslabs/aws-lambda-powertools-python/issues/1565))
+* **deps:** bump fastjsonschema from 2.16.1 to 2.16.2 ([#1530](https://github.com/awslabs/aws-lambda-powertools-python/issues/1530))
+* **deps-dev:** bump mypy-boto3-s3 from 1.24.36.post1 to 1.24.76 ([#1531](https://github.com/awslabs/aws-lambda-powertools-python/issues/1531))
+* **deps-dev:** bump mypy-boto3-secretsmanager from 1.24.54 to 1.24.83 ([#1557](https://github.com/awslabs/aws-lambda-powertools-python/issues/1557))
+* **deps-dev:** bump pytest-cov from 3.0.0 to 4.0.0 ([#1551](https://github.com/awslabs/aws-lambda-powertools-python/issues/1551))
+* **deps-dev:** bump flake8-bugbear from 22.9.11 to 22.9.23 ([#1541](https://github.com/awslabs/aws-lambda-powertools-python/issues/1541))
+* **deps-dev:** bump mypy-boto3-ssm from 1.24.80 to 1.24.81 ([#1544](https://github.com/awslabs/aws-lambda-powertools-python/issues/1544))
+* **deps-dev:** bump mypy-boto3-ssm from 1.24.69 to 1.24.80 ([#1542](https://github.com/awslabs/aws-lambda-powertools-python/issues/1542))
+* **deps-dev:** bump mako from 1.2.2 to 1.2.3 ([#1537](https://github.com/awslabs/aws-lambda-powertools-python/issues/1537))
+* **deps-dev:** bump types-requests from 2.28.10 to 2.28.11 ([#1538](https://github.com/awslabs/aws-lambda-powertools-python/issues/1538))
+* **deps-dev:** bump mkdocs-material from 8.5.3 to 8.5.4 ([#1563](https://github.com/awslabs/aws-lambda-powertools-python/issues/1563))
+* **deps-dev:** bump types-requests from 2.28.11 to 2.28.11.1 ([#1571](https://github.com/awslabs/aws-lambda-powertools-python/issues/1571))
+* **deps-dev:** bump mkdocs-material from 8.5.1 to 8.5.3 ([#1532](https://github.com/awslabs/aws-lambda-powertools-python/issues/1532))
+* **docs:** bump layer version to 36 (1.29.2)
+* **layers:** add dummy v2 layer automation
+* **lint:** use new isort black integration
+* **multiple:** localize powertools_dev env logic and warning ([#1570](https://github.com/awslabs/aws-lambda-powertools-python/issues/1570))
+
+
+
+## [v1.29.2] - 2022-09-19
+## Bug Fixes
+
+* **deps:** bump dev dep mako version to address CVE-2022-40023 ([#1524](https://github.com/awslabs/aws-lambda-powertools-python/issues/1524))
+
+## Maintenance
+
+* **deps:** bump release-drafter/release-drafter from 5.20.1 to 5.21.0 ([#1520](https://github.com/awslabs/aws-lambda-powertools-python/issues/1520))
+* **deps-dev:** bump mkdocs-material from 8.5.0 to 8.5.1 ([#1521](https://github.com/awslabs/aws-lambda-powertools-python/issues/1521))
+* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.60 to 1.24.74 ([#1522](https://github.com/awslabs/aws-lambda-powertools-python/issues/1522))
+
## [v1.29.1] - 2022-09-13
@@ -2319,7 +2378,8 @@
* Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38
-[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.29.1...HEAD
+[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.29.2...HEAD
+[v1.29.2]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.29.1...v1.29.2
[v1.29.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.29.0...v1.29.1
[v1.29.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.28.0...v1.29.0
[v1.28.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.27.0...v1.28.0
diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py
index ae02a659359..0e7b5a87838 100644
--- a/aws_lambda_powertools/event_handler/api_gateway.py
+++ b/aws_lambda_powertools/event_handler/api_gateway.py
@@ -10,12 +10,24 @@
from enum import Enum
from functools import partial
from http import HTTPStatus
-from typing import Any, Callable, Dict, List, Match, Optional, Pattern, Set, Tuple, Type, Union
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ List,
+ Match,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Type,
+ Union,
+)
from aws_lambda_powertools.event_handler import content_types
from aws_lambda_powertools.event_handler.exceptions import NotFoundError, ServiceError
from aws_lambda_powertools.shared import constants
-from aws_lambda_powertools.shared.functions import resolve_truthy_env_var_choice
+from aws_lambda_powertools.shared.functions import powertools_dev_is_set, strtobool
from aws_lambda_powertools.shared.json_encoder import Encoder
from aws_lambda_powertools.utilities.data_classes import (
ALBEvent,
@@ -143,8 +155,8 @@ class Response:
def __init__(
self,
status_code: int,
- content_type: Optional[str],
- body: Union[str, bytes, None],
+ content_type: Optional[str] = None,
+ body: Union[str, bytes, None] = None,
headers: Optional[Dict] = None,
):
"""
@@ -237,6 +249,7 @@ def build(self, event: BaseProxyEvent, cors: Optional[CORSConfig] = None) -> Dic
class BaseRouter(ABC):
current_event: BaseProxyEvent
lambda_context: LambdaContext
+ context: dict
@abstractmethod
def route(
@@ -383,6 +396,14 @@ def lambda_handler(event, context):
"""
return self.route(rule, "PATCH", cors, compress, cache_control)
+ def append_context(self, **additional_context):
+ """Append key=value data as routing context"""
+ self.context.update(**additional_context)
+
+ def clear_context(self):
+ """Resets routing context"""
+ self.context.clear()
+
class ApiGatewayResolver(BaseRouter):
"""API Gateway and ALB proxy resolver
@@ -444,10 +465,9 @@ def __init__(
self._cors = cors
self._cors_enabled: bool = cors is not None
self._cors_methods: Set[str] = {"OPTIONS"}
- self._debug = resolve_truthy_env_var_choice(
- env=os.getenv(constants.EVENT_HANDLER_DEBUG_ENV, "false"), choice=debug
- )
+ self._debug = self._has_debug(debug)
self._strip_prefixes = strip_prefixes
+ self.context: Dict = {} # early init as customers might add context before event resolution
# Allow for a custom serializer or a concise json serialization
self._serializer = serializer or partial(json.dumps, separators=(",", ":"), cls=Encoder)
@@ -502,15 +522,37 @@ def resolve(self, event, context) -> Dict[str, Any]:
"You don't need to serialize event to Event Source Data Class when using Event Handler; see issue #1152"
)
event = event.raw_event
+
if self._debug:
print(self._json_dump(event), end="")
+
+ # Populate router(s) dependencies without keeping a reference to each registered router
BaseRouter.current_event = self._to_proxy_event(event)
BaseRouter.lambda_context = context
- return self._resolve().build(self.current_event, self._cors)
+
+ response = self._resolve().build(self.current_event, self._cors)
+ self.clear_context()
+ return response
def __call__(self, event, context) -> Any:
return self.resolve(event, context)
+ @staticmethod
+ def _has_debug(debug: Optional[bool] = None) -> bool:
+ # It might have been explicitly switched off (debug=False)
+ if debug is not None:
+ return debug
+
+ # Maintenance: deprecate EVENT_HANDLER_DEBUG later in V2.
+ env_debug = os.getenv(constants.EVENT_HANDLER_DEBUG_ENV)
+ if env_debug is not None:
+ warnings.warn(
+ "POWERTOOLS_EVENT_HANDLER_DEBUG is set and will be deprecated in V2. Please use POWERTOOLS_DEV instead."
+ )
+ return strtobool(env_debug) or powertools_dev_is_set()
+
+ return powertools_dev_is_set()
+
@staticmethod
def _compile_regex(rule: str):
"""Precompile regex pattern
@@ -705,7 +747,7 @@ def _json_dump(self, obj: Any) -> str:
return self._serializer(obj)
def include_router(self, router: "Router", prefix: Optional[str] = None) -> None:
- """Adds all routes defined in a router
+ """Adds all routes and context defined in a router
Parameters
----------
@@ -718,6 +760,11 @@ def include_router(self, router: "Router", prefix: Optional[str] = None) -> None
# Add reference to parent ApiGatewayResolver to support use cases where people subclass it to add custom logic
router.api_resolver = self
+ # Merge app and router context
+ self.context.update(**router.context)
+ # use pointer to allow context clearance after event is processed e.g., resolve(evt, ctx)
+ router.context = self.context
+
for route, func in router._routes.items():
if prefix:
rule = route[0]
@@ -733,6 +780,7 @@ class Router(BaseRouter):
def __init__(self):
self._routes: Dict[tuple, Callable] = {}
self.api_resolver: Optional[BaseRouter] = None
+ self.context = {} # early init as customers might add context before event resolution
def route(
self,
diff --git a/aws_lambda_powertools/event_handler/appsync.py b/aws_lambda_powertools/event_handler/appsync.py
index 4ddc51cd102..316792e4119 100644
--- a/aws_lambda_powertools/event_handler/appsync.py
+++ b/aws_lambda_powertools/event_handler/appsync.py
@@ -12,6 +12,7 @@
class BaseRouter:
current_event: AppSyncResolverEventT # type: ignore[valid-type]
lambda_context: LambdaContext
+ context: dict
def __init__(self):
self._resolvers: dict = {}
@@ -34,6 +35,14 @@ def register_resolver(func):
return register_resolver
+ def append_context(self, **additional_context):
+ """Append key=value data as routing context"""
+ self.context.update(**additional_context)
+
+ def clear_context(self):
+ """Resets routing context"""
+ self.context.clear()
+
class AppSyncResolver(BaseRouter):
"""
@@ -68,6 +77,7 @@ def common_field() -> str:
def __init__(self):
super().__init__()
+ self.context = {} # early init as customers might add context before event resolution
def resolve(
self, event: dict, context: LambdaContext, data_model: Type[AppSyncResolverEvent] = AppSyncResolverEvent
@@ -144,8 +154,12 @@ def lambda_handler(event, context):
# Maintenance: revisit generics/overload to fix [attr-defined] in mypy usage
BaseRouter.current_event = data_model(event)
BaseRouter.lambda_context = context
+
resolver = self._get_resolver(BaseRouter.current_event.type_name, BaseRouter.current_event.field_name)
- return resolver(**BaseRouter.current_event.arguments)
+ response = resolver(**BaseRouter.current_event.arguments)
+ self.clear_context()
+
+ return response
def _get_resolver(self, type_name: str, field_name: str) -> Callable:
"""Get resolver for field_name
@@ -182,9 +196,15 @@ def include_router(self, router: "Router") -> None:
router : Router
A router containing a dict of field resolvers
"""
+ # Merge app and router context
+ self.context.update(**router.context)
+ # use pointer to allow context clearance after event is processed e.g., resolve(evt, ctx)
+ router.context = self.context
+
self._resolvers.update(router._resolvers)
class Router(BaseRouter):
def __init__(self):
super().__init__()
+ self.context = {} # early init as customers might add context before event resolution
diff --git a/aws_lambda_powertools/event_handler/lambda_function_url.py b/aws_lambda_powertools/event_handler/lambda_function_url.py
index 6d5924e79b9..6978b29f451 100644
--- a/aws_lambda_powertools/event_handler/lambda_function_url.py
+++ b/aws_lambda_powertools/event_handler/lambda_function_url.py
@@ -1,7 +1,10 @@
from typing import Callable, Dict, List, Optional
from aws_lambda_powertools.event_handler import CORSConfig
-from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, ProxyEventType
+from aws_lambda_powertools.event_handler.api_gateway import (
+ ApiGatewayResolver,
+ ProxyEventType,
+)
from aws_lambda_powertools.utilities.data_classes import LambdaFunctionUrlEvent
diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py
index 1f01015051c..25f9c227c97 100644
--- a/aws_lambda_powertools/logging/formatter.py
+++ b/aws_lambda_powertools/logging/formatter.py
@@ -9,6 +9,7 @@
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
from ..shared import constants
+from ..shared.functions import powertools_dev_is_set
RESERVED_LOG_ATTRS = (
"name",
@@ -111,9 +112,15 @@ def __init__(
Key-value to be included in log messages
"""
+
self.json_deserializer = json_deserializer or json.loads
self.json_default = json_default or str
- self.json_serializer = json_serializer or partial(json.dumps, default=self.json_default, separators=(",", ":"))
+ self.json_indent = (
+ constants.PRETTY_INDENT if powertools_dev_is_set() else constants.COMPACT_INDENT
+ ) # indented json serialization when POWERTOOLS_DEV is set
+ self.json_serializer = json_serializer or partial(
+ json.dumps, default=self.json_default, separators=(",", ":"), indent=self.json_indent
+ )
self.datefmt = datefmt
self.use_datetime_directive = use_datetime_directive
diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py
index f70224cabae..a0e24f1dcf9 100644
--- a/aws_lambda_powertools/logging/logger.py
+++ b/aws_lambda_powertools/logging/logger.py
@@ -12,7 +12,11 @@
from ..shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice
from .exceptions import InvalidLoggerSamplingRateError
from .filters import SuppressFilter
-from .formatter import RESERVED_FORMATTER_CUSTOM_KEYS, BasePowertoolsFormatter, LambdaPowertoolsFormatter
+from .formatter import (
+ RESERVED_FORMATTER_CUSTOM_KEYS,
+ BasePowertoolsFormatter,
+ LambdaPowertoolsFormatter,
+)
from .lambda_context import build_lambda_context_model
logger = logging.getLogger(__name__)
diff --git a/aws_lambda_powertools/logging/utils.py b/aws_lambda_powertools/logging/utils.py
index 52dcfaff31e..05ac6d5001b 100644
--- a/aws_lambda_powertools/logging/utils.py
+++ b/aws_lambda_powertools/logging/utils.py
@@ -8,7 +8,7 @@
def copy_config_to_registered_loggers(
source_logger: Logger,
- log_level: Optional[str] = None,
+ log_level: Optional[Union[int, str]] = None,
exclude: Optional[Set[str]] = None,
include: Optional[Set[str]] = None,
) -> None:
@@ -19,7 +19,7 @@ def copy_config_to_registered_loggers(
----------
source_logger : Logger
Powertools Logger to copy configuration from
- log_level : str, optional
+ log_level : Union[int, str], optional
Logging level to set to registered loggers, by default uses source_logger logging level
include : Optional[Set[str]], optional
List of logger names to include, by default all registered loggers are included
@@ -81,6 +81,8 @@ def _configure_logger(source_logger: Logger, logger: logging.Logger, level: Unio
logger.handlers = []
logger.setLevel(level)
logger.propagate = False # ensure we don't propagate logs to existing loggers, #1073
+ source_logger.append_keys(name="%(name)s") # include logger name, see #1267
+
source_logger.debug(f"Logger {logger} reconfigured to use logging level {level}")
for source_handler in source_logger.handlers:
logger.addHandler(source_handler)
diff --git a/aws_lambda_powertools/package_logger.py b/aws_lambda_powertools/package_logger.py
index c1adb67ade8..e6c58ba9549 100644
--- a/aws_lambda_powertools/package_logger.py
+++ b/aws_lambda_powertools/package_logger.py
@@ -1,7 +1,25 @@
import logging
+from aws_lambda_powertools.logging.logger import set_package_logger
+from aws_lambda_powertools.shared.functions import powertools_debug_is_set
+
+
+def set_package_logger_handler(stream=None):
+ """Sets up Lambda Powertools package logging.
+
+ By default, we discard any output to not interfere with customers logging.
+
+ When POWERTOOLS_DEBUG env var is set, we setup `aws_lambda_powertools` logger in DEBUG level.
+
+ Parameters
+ ----------
+ stream: sys.stdout
+ log stream, stdout by default
+ """
+
+ if powertools_debug_is_set():
+ return set_package_logger(stream=stream)
-def set_package_logger_handler():
logger = logging.getLogger("aws_lambda_powertools")
logger.addHandler(logging.NullHandler())
logger.propagate = False
diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py
index 48d94d88f1d..86a6c2ac41b 100644
--- a/aws_lambda_powertools/shared/constants.py
+++ b/aws_lambda_powertools/shared/constants.py
@@ -32,3 +32,10 @@
"cold_start",
"xray_trace_id",
]
+
+# JSON indentation level
+PRETTY_INDENT: int = 4
+COMPACT_INDENT = None
+
+POWERTOOLS_DEV_ENV: str = "POWERTOOLS_DEV"
+POWERTOOLS_DEBUG_ENV: str = "POWERTOOLS_DEBUG"
diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py
index e9bc3521125..2212eb77e18 100644
--- a/aws_lambda_powertools/shared/functions.py
+++ b/aws_lambda_powertools/shared/functions.py
@@ -1,8 +1,12 @@
import base64
import logging
+import os
+import warnings
from binascii import Error as BinAsciiError
from typing import Optional, Union
+from aws_lambda_powertools.shared import constants
+
logger = logging.getLogger(__name__)
@@ -16,9 +20,9 @@ def strtobool(value: str) -> bool:
> note:: Copied from distutils.util.
"""
value = value.lower()
- if value in ("y", "yes", "t", "true", "on", "1"):
+ if value in ("1", "y", "yes", "t", "true", "on"):
return True
- if value in ("n", "no", "f", "false", "off", "0"):
+ if value in ("0", "n", "no", "f", "false", "off"):
return False
raise ValueError(f"invalid truth value {value!r}")
@@ -78,3 +82,21 @@ def bytes_to_string(value: bytes) -> str:
return value.decode("utf-8")
except (BinAsciiError, TypeError):
raise ValueError("base64 UTF-8 decode failed")
+
+
+def powertools_dev_is_set() -> bool:
+ is_on = strtobool(os.getenv(constants.POWERTOOLS_DEV_ENV, "0"))
+ if is_on:
+ warnings.warn("POWERTOOLS_DEV environment variable is enabled. Increasing verbosity across utilities.")
+ return True
+
+ return False
+
+
+def powertools_debug_is_set() -> bool:
+ is_on = strtobool(os.getenv(constants.POWERTOOLS_DEBUG_ENV, "0"))
+ if is_on:
+ warnings.warn("POWERTOOLS_DEBUG environment variable is enabled. Setting logging level to DEBUG.")
+ return True
+
+ return False
diff --git a/aws_lambda_powertools/tracing/extensions.py b/aws_lambda_powertools/tracing/extensions.py
index 6c641238c98..453647e51a3 100644
--- a/aws_lambda_powertools/tracing/extensions.py
+++ b/aws_lambda_powertools/tracing/extensions.py
@@ -8,7 +8,9 @@ def aiohttp_trace_config():
TraceConfig
aiohttp trace config
"""
- from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config # pragma: no cover
+ from aws_xray_sdk.ext.aiohttp.client import (
+ aws_xray_trace_config, # pragma: no cover
+ )
aws_xray_trace_config.__doc__ = "aiohttp extension for X-Ray (aws_xray_trace_config)" # pragma: no cover
diff --git a/aws_lambda_powertools/utilities/batch/__init__.py b/aws_lambda_powertools/utilities/batch/__init__.py
index 463f6f7fbff..7db0781232c 100644
--- a/aws_lambda_powertools/utilities/batch/__init__.py
+++ b/aws_lambda_powertools/utilities/batch/__init__.py
@@ -13,7 +13,10 @@
batch_processor,
)
from aws_lambda_powertools.utilities.batch.exceptions import ExceptionInfo
-from aws_lambda_powertools.utilities.batch.sqs import PartialSQSProcessor, sqs_batch_processor
+from aws_lambda_powertools.utilities.batch.sqs import (
+ PartialSQSProcessor,
+ sqs_batch_processor,
+)
__all__ = (
"BatchProcessor",
diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py
index 1122bea4c03..f2d7cd2ed74 100644
--- a/aws_lambda_powertools/utilities/batch/base.py
+++ b/aws_lambda_powertools/utilities/batch/base.py
@@ -4,6 +4,7 @@
Batch processing utilities
"""
import copy
+import inspect
import logging
import sys
from abc import ABC, abstractmethod
@@ -11,10 +12,18 @@
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union, overload
from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
-from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError, ExceptionInfo
-from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord
-from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord
+from aws_lambda_powertools.utilities.batch.exceptions import (
+ BatchProcessingError,
+ ExceptionInfo,
+)
+from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
+ DynamoDBRecord,
+)
+from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import (
+ KinesisStreamRecord,
+)
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+from aws_lambda_powertools.utilities.typing import LambdaContext
logger = logging.getLogger(__name__)
@@ -34,7 +43,9 @@ class EventType(Enum):
# We need them as subclasses as we must access their message ID or sequence number metadata via dot notation
if has_pydantic:
from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamRecordModel
- from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecord as KinesisDataStreamRecordModel
+ from aws_lambda_powertools.utilities.parser.models import (
+ KinesisDataStreamRecord as KinesisDataStreamRecordModel,
+ )
from aws_lambda_powertools.utilities.parser.models import SqsRecordModel
BatchTypeModels = Optional[
@@ -55,6 +66,8 @@ class BasePartialProcessor(ABC):
Abstract class for batch processors.
"""
+ lambda_context: LambdaContext
+
def __init__(self):
self.success_messages: List[BatchEventTypes] = []
self.fail_messages: List[BatchEventTypes] = []
@@ -94,7 +107,7 @@ def __enter__(self):
def __exit__(self, exception_type, exception_value, traceback):
self._clean()
- def __call__(self, records: List[dict], handler: Callable):
+ def __call__(self, records: List[dict], handler: Callable, lambda_context: Optional[LambdaContext] = None):
"""
Set instance attributes before execution
@@ -107,6 +120,31 @@ def __call__(self, records: List[dict], handler: Callable):
"""
self.records = records
self.handler = handler
+
+ # NOTE: If a record handler has `lambda_context` parameter in its function signature, we inject it.
+ # This is the earliest we can inspect for signature to prevent impacting performance.
+ #
+ # Mechanism:
+ #
+ # 1. When using the `@batch_processor` decorator, this happens automatically.
+ # 2. When using the context manager, customers have to include `lambda_context` param.
+ #
+ # Scenario: Injects Lambda context
+ #
+ # def record_handler(record, lambda_context): ... # noqa: E800
+ # with processor(records=batch, handler=record_handler, lambda_context=context): ... # noqa: E800
+ #
+ # Scenario: Does NOT inject Lambda context (default)
+ #
+ # def record_handler(record): pass # noqa: E800
+ # with processor(records=batch, handler=record_handler): ... # noqa: E800
+ #
+ if lambda_context is None:
+ self._handler_accepts_lambda_context = False
+ else:
+ self.lambda_context = lambda_context
+ self._handler_accepts_lambda_context = "lambda_context" in inspect.signature(self.handler).parameters
+
return self
def success_handler(self, record, result: Any) -> SuccessResponse:
@@ -155,7 +193,7 @@ def failure_handler(self, record, exception: ExceptionInfo) -> FailureResponse:
@lambda_handler_decorator
def batch_processor(
- handler: Callable, event: Dict, context: Dict, record_handler: Callable, processor: BasePartialProcessor
+ handler: Callable, event: Dict, context: LambdaContext, record_handler: Callable, processor: BasePartialProcessor
):
"""
Middleware to handle batch event processing
@@ -166,7 +204,7 @@ def batch_processor(
Lambda's handler
event: Dict
Lambda's Event
- context: Dict
+ context: LambdaContext
Lambda's Context
record_handler: Callable
Callable to process each record from the batch
@@ -193,7 +231,7 @@ def batch_processor(
"""
records = event["Records"]
- with processor(records, record_handler):
+ with processor(records, record_handler, lambda_context=context):
processor.process()
return handler(event, context)
@@ -365,7 +403,11 @@ def _process_record(self, record: dict) -> Union[SuccessResponse, FailureRespons
"""
data = self._to_batch_type(record=record, event_type=self.event_type, model=self.model)
try:
- result = self.handler(record=data)
+ if self._handler_accepts_lambda_context:
+ result = self.handler(record=data, lambda_context=self.lambda_context)
+ else:
+ result = self.handler(record=data)
+
return self.success_handler(record=record, result=result)
except Exception:
return self.failure_handler(record=data, exception=sys.exc_info())
diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py
index 159779c86a7..c28ec0d72e2 100644
--- a/aws_lambda_powertools/utilities/data_classes/alb_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py
@@ -1,6 +1,9 @@
from typing import Dict, List, Optional
-from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper
+from aws_lambda_powertools.utilities.data_classes.common import (
+ BaseProxyEvent,
+ DictWrapper,
+)
class ALBEventRequestContext(DictWrapper):
diff --git a/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py b/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py
index 89f774293e7..30cd497e514 100644
--- a/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py
@@ -1,6 +1,9 @@
from typing import Any, Dict, List, Optional, Union
-from aws_lambda_powertools.utilities.data_classes.common import DictWrapper, get_header_value
+from aws_lambda_powertools.utilities.data_classes.common import (
+ DictWrapper,
+ get_header_value,
+)
def get_identity_object(identity: Optional[dict]) -> Any:
diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py
index 2109ee3dd3e..1b671489cdd 100644
--- a/aws_lambda_powertools/utilities/data_classes/common.py
+++ b/aws_lambda_powertools/utilities/data_classes/common.py
@@ -1,9 +1,10 @@
import base64
import json
-from typing import Any, Dict, Optional
+from collections.abc import Mapping
+from typing import Any, Dict, Iterator, Optional
-class DictWrapper:
+class DictWrapper(Mapping):
"""Provides a single read only access to a wrapper dict"""
def __init__(self, data: Dict[str, Any]):
@@ -19,6 +20,12 @@ def __eq__(self, other: Any) -> bool:
return self._data == other._data
+ def __iter__(self) -> Iterator:
+ return iter(self._data)
+
+ def __len__(self) -> int:
+ return len(self._data)
+
def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
return self._data.get(key, default)
diff --git a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
index 7e209fab3e2..eb674c86b60 100644
--- a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py
@@ -182,8 +182,10 @@ def approximate_creation_date_time(self) -> Optional[int]:
item = self.get("ApproximateCreationDateTime")
return None if item is None else int(item)
+ # NOTE: This override breaks the Mapping protocol of DictWrapper, it's left here for backwards compatibility with
+ # a 'type: ignore' comment. See #1516 for discussion
@property
- def keys(self) -> Optional[Dict[str, AttributeValue]]:
+ def keys(self) -> Optional[Dict[str, AttributeValue]]: # type: ignore[override]
"""The primary key attribute(s) for the DynamoDB item that was modified."""
return _attribute_value_dict(self._data, "Keys")
diff --git a/aws_lambda_powertools/utilities/data_classes/lambda_function_url_event.py b/aws_lambda_powertools/utilities/data_classes/lambda_function_url_event.py
index 01c1a83f5db..beaf16b345c 100644
--- a/aws_lambda_powertools/utilities/data_classes/lambda_function_url_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/lambda_function_url_event.py
@@ -1,4 +1,6 @@
-from aws_lambda_powertools.utilities.data_classes.api_gateway_proxy_event import APIGatewayProxyEventV2
+from aws_lambda_powertools.utilities.data_classes.api_gateway_proxy_event import (
+ APIGatewayProxyEventV2,
+)
class LambdaFunctionUrlEvent(APIGatewayProxyEventV2):
diff --git a/aws_lambda_powertools/utilities/data_classes/s3_object_event.py b/aws_lambda_powertools/utilities/data_classes/s3_object_event.py
index d4f97b725bf..45985120698 100644
--- a/aws_lambda_powertools/utilities/data_classes/s3_object_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/s3_object_event.py
@@ -1,6 +1,9 @@
from typing import Dict, Optional
-from aws_lambda_powertools.utilities.data_classes.common import DictWrapper, get_header_value
+from aws_lambda_powertools.utilities.data_classes.common import (
+ DictWrapper,
+ get_header_value,
+)
class S3ObjectContext(DictWrapper):
diff --git a/aws_lambda_powertools/utilities/feature_flags/appconfig.py b/aws_lambda_powertools/utilities/feature_flags/appconfig.py
index dd581df9e22..8c8dbacc6c5 100644
--- a/aws_lambda_powertools/utilities/feature_flags/appconfig.py
+++ b/aws_lambda_powertools/utilities/feature_flags/appconfig.py
@@ -5,7 +5,11 @@
from botocore.config import Config
from aws_lambda_powertools.utilities import jmespath_utils
-from aws_lambda_powertools.utilities.parameters import AppConfigProvider, GetParameterError, TransformParameterError
+from aws_lambda_powertools.utilities.parameters import (
+ AppConfigProvider,
+ GetParameterError,
+ TransformParameterError,
+)
from ... import Logger
from .base import StoreProvider
diff --git a/aws_lambda_powertools/utilities/idempotency/__init__.py b/aws_lambda_powertools/utilities/idempotency/__init__.py
index 4461453a8be..148b291ea6d 100644
--- a/aws_lambda_powertools/utilities/idempotency/__init__.py
+++ b/aws_lambda_powertools/utilities/idempotency/__init__.py
@@ -2,8 +2,12 @@
Utility for adding idempotency to lambda functions
"""
-from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer
-from aws_lambda_powertools.utilities.idempotency.persistence.dynamodb import DynamoDBPersistenceLayer
+from aws_lambda_powertools.utilities.idempotency.persistence.base import (
+ BasePersistenceLayer,
+)
+from aws_lambda_powertools.utilities.idempotency.persistence.dynamodb import (
+ DynamoDBPersistenceLayer,
+)
from .idempotency import IdempotencyConfig, idempotent, idempotent_function
diff --git a/aws_lambda_powertools/utilities/idempotency/idempotency.py b/aws_lambda_powertools/utilities/idempotency/idempotency.py
index 646fd68558f..abd45a86be1 100644
--- a/aws_lambda_powertools/utilities/idempotency/idempotency.py
+++ b/aws_lambda_powertools/utilities/idempotency/idempotency.py
@@ -11,7 +11,9 @@
from aws_lambda_powertools.shared.types import AnyCallableT
from aws_lambda_powertools.utilities.idempotency.base import IdempotencyHandler
from aws_lambda_powertools.utilities.idempotency.config import IdempotencyConfig
-from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer
+from aws_lambda_powertools.utilities.idempotency.persistence.base import (
+ BasePersistenceLayer,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
logger = logging.getLogger(__name__)
diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
index 90cbd853e8a..5d4d999ae1d 100644
--- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
+++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
@@ -12,7 +12,10 @@
IdempotencyItemAlreadyExistsError,
IdempotencyItemNotFoundError,
)
-from aws_lambda_powertools.utilities.idempotency.persistence.base import STATUS_CONSTANTS, DataRecord
+from aws_lambda_powertools.utilities.idempotency.persistence.base import (
+ STATUS_CONSTANTS,
+ DataRecord,
+)
logger = logging.getLogger(__name__)
diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py
index 6d403019181..52059cb9ee7 100644
--- a/aws_lambda_powertools/utilities/parser/models/__init__.py
+++ b/aws_lambda_powertools/utilities/parser/models/__init__.py
@@ -14,11 +14,29 @@
RequestContextV2AuthorizerJwt,
RequestContextV2Http,
)
-from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel
-from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel
+from .cloudwatch import (
+ CloudWatchLogsData,
+ CloudWatchLogsDecode,
+ CloudWatchLogsLogEvent,
+ CloudWatchLogsModel,
+)
+from .dynamodb import (
+ DynamoDBStreamChangedRecordModel,
+ DynamoDBStreamModel,
+ DynamoDBStreamRecordModel,
+)
from .event_bridge import EventBridgeModel
-from .kafka import KafkaBaseEventModel, KafkaMskEventModel, KafkaRecordModel, KafkaSelfManagedEventModel
-from .kinesis import KinesisDataStreamModel, KinesisDataStreamRecord, KinesisDataStreamRecordPayload
+from .kafka import (
+ KafkaBaseEventModel,
+ KafkaMskEventModel,
+ KafkaRecordModel,
+ KafkaSelfManagedEventModel,
+)
+from .kinesis import (
+ KinesisDataStreamModel,
+ KinesisDataStreamRecord,
+ KinesisDataStreamRecordPayload,
+)
from .lambda_function_url import LambdaFunctionUrlModel
from .s3 import S3Model, S3RecordModel
from .s3_object_event import (
diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py
index ef939cd11f7..9cb0c00f73a 100644
--- a/aws_lambda_powertools/utilities/parser/parser.py
+++ b/aws_lambda_powertools/utilities/parser/parser.py
@@ -1,7 +1,11 @@
import logging
from typing import Any, Callable, Dict, Optional, Type, overload
-from aws_lambda_powertools.utilities.parser.types import EnvelopeModel, EventParserReturnType, Model
+from aws_lambda_powertools.utilities.parser.types import (
+ EnvelopeModel,
+ EventParserReturnType,
+ Model,
+)
from ...middleware_factory import lambda_handler_decorator
from ..typing import LambdaContext
diff --git a/aws_lambda_powertools/utilities/typing/lambda_client_context.py b/aws_lambda_powertools/utilities/typing/lambda_client_context.py
index 5b9e9506b4c..5c95e385ec5 100644
--- a/aws_lambda_powertools/utilities/typing/lambda_client_context.py
+++ b/aws_lambda_powertools/utilities/typing/lambda_client_context.py
@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
from typing import Any, Dict
-from aws_lambda_powertools.utilities.typing.lambda_client_context_mobile_client import LambdaClientContextMobileClient
+from aws_lambda_powertools.utilities.typing.lambda_client_context_mobile_client import (
+ LambdaClientContextMobileClient,
+)
class LambdaClientContext(object):
diff --git a/aws_lambda_powertools/utilities/typing/lambda_context.py b/aws_lambda_powertools/utilities/typing/lambda_context.py
index b132fe413bc..ffa983f3711 100644
--- a/aws_lambda_powertools/utilities/typing/lambda_context.py
+++ b/aws_lambda_powertools/utilities/typing/lambda_context.py
@@ -1,6 +1,10 @@
# -*- coding: utf-8 -*-
-from aws_lambda_powertools.utilities.typing.lambda_client_context import LambdaClientContext
-from aws_lambda_powertools.utilities.typing.lambda_cognito_identity import LambdaCognitoIdentity
+from aws_lambda_powertools.utilities.typing.lambda_client_context import (
+ LambdaClientContext,
+)
+from aws_lambda_powertools.utilities.typing.lambda_cognito_identity import (
+ LambdaCognitoIdentity,
+)
class LambdaContext(object):
diff --git a/aws_lambda_powertools/utilities/validation/__init__.py b/aws_lambda_powertools/utilities/validation/__init__.py
index 94706e3214d..45d076ff207 100644
--- a/aws_lambda_powertools/utilities/validation/__init__.py
+++ b/aws_lambda_powertools/utilities/validation/__init__.py
@@ -2,7 +2,11 @@
Simple validator to enforce incoming/outgoing event conforms with JSON Schema
"""
-from .exceptions import InvalidEnvelopeExpressionError, InvalidSchemaFormatError, SchemaValidationError
+from .exceptions import (
+ InvalidEnvelopeExpressionError,
+ InvalidSchemaFormatError,
+ SchemaValidationError,
+)
from .validator import validate, validator
__all__ = [
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
index f4f45a051f8..10aaa9faeb9 100644
--- a/docs/core/event_handler/api_gateway.md
+++ b/docs/core/event_handler/api_gateway.md
@@ -214,7 +214,7 @@ By default, we return `404` for any unmatched route.
You can use **`not_found`** decorator to override this behavior, and return a custom **`Response`**.
-```python hl_lines="14 18" title="Handling not found"
+```python hl_lines="18 22" title="Handling not found"
--8<-- "examples/event_handler_rest/src/not_found_routes.py"
```
@@ -222,7 +222,7 @@ You can use **`not_found`** decorator to override this behavior, and return a cu
You can use **`exception_handler`** decorator with any Python exception. This allows you to handle a common exception outside your route, for example validation errors.
-```python hl_lines="13-14" title="Exception handling"
+```python hl_lines="17-18" title="Exception handling"
--8<-- "examples/event_handler_rest/src/exception_handling.py"
```
@@ -316,7 +316,7 @@ You can use the `Response` class to have full control over the response, for exa
=== "fine_grained_responses.py"
- ```python hl_lines="7 24-28"
+ ```python hl_lines="9 28-32"
--8<-- "examples/event_handler_rest/src/fine_grained_responses.py"
```
@@ -364,7 +364,7 @@ Like `compress` feature, the client must send the `Accept` header with the corre
Lambda Function URLs handle binary media types automatically.
=== "binary_responses.py"
- ```python hl_lines="14 20"
+ ```python hl_lines="17 23"
--8<-- "examples/event_handler_rest/src/binary_responses.py"
```
@@ -388,7 +388,7 @@ Like `compress` feature, the client must send the `Accept` header with the corre
### Debug mode
-You can enable debug mode via `debug` param, or via `POWERTOOLS_EVENT_HANDLER_DEBUG` [environment variable](../../index.md#environment-variables).
+You can enable debug mode via `debug` param, or via `POWERTOOLS_DEV` [environment variable](../../index.md#environment-variables).
This will enable full tracebacks errors in the response, print request and responses, and set CORS in development mode.
@@ -449,6 +449,28 @@ When necessary, you can set a prefix when including a router object. This means
--8<-- "examples/event_handler_rest/src/split_route_prefix_module.py"
```
+#### Sharing contextual data
+
+You can use `append_context` when you want to share data between your App and Router instances. Any data you share will be available via the `context` dictionary available in your App or Router context.
+
+???+ info
+ For safety, we always clear any data available in the `context` dictionary after each invocation.
+
+???+ tip
+ This can also be useful for middlewares injecting contextual information before a request is processed.
+
+=== "split_route_append_context.py"
+
+ ```python hl_lines="18"
+ --8<-- "examples/event_handler_rest/src/split_route_append_context.py"
+ ```
+
+=== "split_route_append_context_module.py"
+
+ ```python hl_lines="16"
+ --8<-- "examples/event_handler_rest/src/split_route_append_context_module.py"
+ ```
+
#### Sample layout
This is a sample project layout for a monolithic function with routes split in different files (`/todos`, `/health`).
diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md
index dd9d1dd2d63..2fe5896b273 100644
--- a/docs/core/event_handler/appsync.md
+++ b/docs/core/event_handler/appsync.md
@@ -185,7 +185,7 @@ You can subclass [AppSyncResolverEvent](../../utilities/data_classes.md#appsync-
=== "custom_models.py.py"
- ```python hl_lines="11 14 30-32 35-36 43 50"
+ ```python hl_lines="11 14 32-34 37-38 45 52"
--8<-- "examples/event_handler_graphql/src/custom_models.py"
```
@@ -226,6 +226,28 @@ Let's assume you have `split_operation.py` as your Lambda function entrypoint an
--8<-- "examples/event_handler_graphql/src/split_operation.py"
```
+#### Sharing contextual data
+
+You can use `append_context` when you want to share data between your App and Router instances. Any data you share will be available via the `context` dictionary available in your App or Router context.
+
+???+ info
+ For safety, we always clear any data available in the `context` dictionary after each invocation.
+
+???+ tip
+ This can also be useful for middlewares injecting contextual information before a request is processed.
+
+=== "split_operation_append_context.py"
+
+ ```python hl_lines="17"
+ --8<-- "examples/event_handler_graphql/src/split_operation_append_context.py"
+ ```
+
+=== "split_operation_append_context_module.py"
+
+ ```python hl_lines="29"
+ --8<-- "examples/event_handler_graphql/src/split_operation_append_context_module.py"
+ ```
+
## Testing your code
You can test your resolvers by passing a mocked or actual AppSync Lambda event that you're expecting.
@@ -259,7 +281,7 @@ And an example for testing asynchronous resolvers. Note that this requires the `
=== "assert_async_graphql_response.py"
- ```python hl_lines="28"
+ ```python hl_lines="31"
--8<-- "examples/event_handler_graphql/src/assert_async_graphql_response.py"
```
diff --git a/docs/core/logger.md b/docs/core/logger.md
index c699568b349..4b16a1eeb71 100644
--- a/docs/core/logger.md
+++ b/docs/core/logger.md
@@ -369,6 +369,9 @@ If you prefer configuring it separately, or you'd want to bring this JSON Format
| **`log_record_order`** | set order of log keys when logging | `["level", "location", "message", "timestamp"]` |
| **`kwargs`** | key-value to be included in log messages | `None` |
+???+ info
+ When `POWERTOOLS_DEV` env var is present and set to `"true"`, Logger's default serializer (`json.dumps`) will pretty-print log messages for easier readability.
+
```python hl_lines="2 7-8" title="Pre-configuring Lambda Powertools Formatter"
--8<-- "examples/logger/src/powertools_formatter_setup.py"
```
@@ -605,6 +608,9 @@ for the given name and level to the logging module. By default, this logs all bo
You can copy the Logger setup to all or sub-sets of registered external loggers. Use the `copy_config_to_registered_logger` method to do this.
+???+ tip
+ To help differentiate between loggers, we include the standard logger `name` attribute for all loggers we copied configuration to.
+
By default all registered loggers will be modified. You can change this behavior by providing `include` and `exclude` attributes. You can also provide optional `log_level` attribute external loggers will be configured with.
```python hl_lines="10" title="Cloning Logger config to all other registered standard loggers"
diff --git a/docs/index.md b/docs/index.md
index 78b76c7bed6..f831cf1d620 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -14,7 +14,7 @@ A suite of utilities for AWS Lambda functions to ease adopting best practices su
Powertools is available in the following formats:
-* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33**](#){: .copyMe}:clipboard:
+* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:36**](#){: .copyMe}:clipboard:
* **PyPi**: **`pip install aws-lambda-powertools`**
???+ hint "Support this project by using Lambda Layers :heart:"
@@ -32,28 +32,28 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
| Region | Layer ARN |
| ---------------- | -------------------------------------------------------------------------------------------------------- |
- | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
- | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: |
+ | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
+ | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:36](#){: .copyMe}:clipboard: |
??? question "Can't find our Lambda Layer for your preferred AWS region?"
You can use [Serverless Application Repository (SAR)](#sar) method, our [CDK Layer Construct](https://github.com/aws-samples/cdk-lambda-powertools-python-layer){target="_blank"}, or PyPi like you normally would for any other library.
@@ -67,7 +67,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
Type: AWS::Serverless::Function
Properties:
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:33
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:36
```
=== "Serverless framework"
@@ -77,7 +77,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
hello:
handler: lambda_function.lambda_handler
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:33
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:36
```
=== "CDK"
@@ -93,7 +93,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:33"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:36"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -142,7 +142,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:36"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
}
@@ -161,7 +161,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:36
❯ amplify push -y
@@ -172,7 +172,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:36
? Do you want to edit the local lambda function now? No
```
@@ -180,7 +180,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
Change {region} to your AWS region, e.g. `eu-west-1`
```bash title="AWS CLI"
- aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33 --region {region}
+ aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:36 --region {region}
```
The pre-signed URL to download this Lambda Layer will be within `Location` key.
@@ -438,31 +438,41 @@ Core utilities such as Tracing, Logging, Metrics, and Event Handler will be avai
???+ info
Explicit parameters take precedence over environment variables
-| Environment variable | Description | Utility | Default |
-| ----------------------------------------- | -------------------------------------------------------------------------------------- | --------------------------------------------------------------- | --------------------- |
-| **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | All | `"service_undefined"` |
-| **POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics) | `None` |
-| **POWERTOOLS_TRACE_DISABLED** | Explicitly disables tracing | [Tracing](./core/tracer) | `false` |
-| **POWERTOOLS_TRACER_CAPTURE_RESPONSE** | Captures Lambda or method return as metadata. | [Tracing](./core/tracer) | `true` |
-| **POWERTOOLS_TRACER_CAPTURE_ERROR** | Captures Lambda or method exception as metadata. | [Tracing](./core/tracer) | `true` |
-| **POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory) | `false` |
-| **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger) | `false` |
-| **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger) | `0` |
-| **POWERTOOLS_LOG_DEDUPLICATION_DISABLED** | Disables log deduplication filter protection to use Pytest Live Log feature | [Logging](./core/logger) | `false` |
-| **POWERTOOLS_EVENT_HANDLER_DEBUG** | Enables debugging mode for event handler | [Event Handler](./core/event_handler/api_gateway.md#debug-mode) | `false` |
-| **LOG_LEVEL** | Sets logging level | [Logging](./core/logger) | `INFO` |
+| Environment variable | Description | Utility | Default |
+| ----------------------------------------- | -------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | --------------------- |
+| **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | All | `"service_undefined"` |
+| **POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics) | `None` |
+| **POWERTOOLS_TRACE_DISABLED** | Explicitly disables tracing | [Tracing](./core/tracer) | `false` |
+| **POWERTOOLS_TRACER_CAPTURE_RESPONSE** | Captures Lambda or method return as metadata. | [Tracing](./core/tracer) | `true` |
+| **POWERTOOLS_TRACER_CAPTURE_ERROR** | Captures Lambda or method exception as metadata. | [Tracing](./core/tracer) | `true` |
+| **POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory) | `false` |
+| **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger) | `false` |
+| **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger) | `0` |
+| **POWERTOOLS_LOG_DEDUPLICATION_DISABLED** | Disables log deduplication filter protection to use Pytest Live Log feature | [Logging](./core/logger) | `false` |
+| **POWERTOOLS_EVENT_HANDLER_DEBUG** | Enables debugging mode for event handler | [Event Handler](./core/event_handler/api_gateway.md#debug-mode) | `false` |
+| **POWERTOOLS_DEV** | Increases verbosity across utilities | Multiple; see [POWERTOOLS_DEV effect below](#increasing-verbosity-across-utilities) | `0` |
+| **LOG_LEVEL** | Sets logging level | [Logging](./core/logger) | `INFO` |
+
+### Optimizing for non-production environments
+
+Whether you're prototyping locally or against a non-production environment, you can use `POWERTOOLS_DEV` to increase verbosity across multiple utilities.
-## Debug mode
+???+ info
+ We will emit a warning when `POWERTOOLS_DEV` is enabled to help you detect misuse in production environments.
-As a best practice, AWS Lambda Powertools module logging statements are suppressed. If necessary, you can enable debugging using `set_package_logger` for additional information on every internal operation:
+When `POWERTOOLS_DEV` is set to a truthy value (`1`, `true`), it'll have the following effects:
-```python title="Powertools debug mode example"
-from aws_lambda_powertools.logging.logger import set_package_logger
+| Utility | Effect |
+| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| **Logger** | Increase JSON indentation to 4. This will ease local debugging when running functions locally under emulators or direct calls while not affecting unit tests |
+| **Event Handler** | Enable full traceback errors in the response, indent request/responses, and CORS in dev mode (`*`). This will deprecate [`POWERTOOLS_EVENT_HANDLER_DEBUG`](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/event_handler/api_gateway/#debug-mode) in the future. |
+| **Tracer** | Future-proof safety to disable tracing operations in non-Lambda environments. This already happens automatically in the Tracer utility. |
-set_package_logger() # (1)
-```
+## Debug mode
+
+As a best practice for libraries, AWS Lambda Powertools module logging statements are suppressed.
-1. :information_source: this will configure our `aws_lambda_powertools` logger with debug.
+When necessary, you can use the `POWERTOOLS_DEBUG` environment variable to enable debugging. This will provide additional information on every internal operation.
## Tenets
diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md
index 6241179ed4e..c429ac24693 100644
--- a/docs/utilities/batch.md
+++ b/docs/utilities/batch.md
@@ -833,6 +833,73 @@ def lambda_handler(event, context: LambdaContext):
return processor.response()
```
+### Accessing Lambda Context
+
+Within your `record_handler` function, you might need access to the Lambda context to determine how much time you have left before your function times out.
+
+We can automatically inject the [Lambda context](https://docs.aws.amazon.com/lambda/latest/dg/python-context.html){target="_blank"} into your `record_handler` if your function signature has a parameter named `lambda_context`. When using a context manager, you also need to pass the Lambda context object like in the example below.
+
+=== "As a decorator"
+
+ ```python hl_lines="15"
+ from typing import Optional
+
+ from aws_lambda_powertools import Logger, Tracer
+ from aws_lambda_powertools.utilities.batch import (BatchProcessor, EventType,
+ batch_processor)
+ from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+
+ processor = BatchProcessor(event_type=EventType.SQS)
+ tracer = Tracer()
+ logger = Logger()
+
+
+ @tracer.capture_method
+ def record_handler(record: SQSRecord, lambda_context: Optional[LambdaContext] = None):
+ if lambda_context is not None:
+ remaining_time = lambda_context.get_remaining_time_in_millis()
+ ...
+
+
+ @logger.inject_lambda_context
+ @tracer.capture_lambda_handler
+ @batch_processor(record_handler=record_handler, processor=processor)
+ def lambda_handler(event, context: LambdaContext):
+ return processor.response()
+ ```
+
+=== "As a context manager"
+
+ ```python hl_lines="14 23"
+ from typing import Optional
+
+ from aws_lambda_powertools import Logger, Tracer
+ from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType
+ from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+
+ processor = BatchProcessor(event_type=EventType.SQS)
+ tracer = Tracer()
+ logger = Logger()
+
+
+ @tracer.capture_method
+ def record_handler(record: SQSRecord, lambda_context: Optional[LambdaContext] = None):
+ if lambda_context is not None:
+ remaining_time = lambda_context.get_remaining_time_in_millis()
+ ...
+
+ @logger.inject_lambda_context
+ @tracer.capture_lambda_handler
+ def lambda_handler(event, context: LambdaContext):
+ batch = event["Records"]
+ with processor(records=batch, handler=record_handler, lambda_context=context):
+ result = processor.process()
+
+ return result
+ ```
+
### Extending BatchProcessor
You might want to bring custom logic to the existing `BatchProcessor` to slightly override how we handle successes and failures.
diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md
index 209bf4fffe9..a01a72ced16 100644
--- a/docs/utilities/jmespath_functions.md
+++ b/docs/utilities/jmespath_functions.md
@@ -52,7 +52,7 @@ We provide built-in envelopes for popular AWS Lambda event sources to easily dec
=== "extract_data_from_builtin_envelope.py"
- ```python hl_lines="1 6"
+ ```python hl_lines="1-4 9"
--8<-- "examples/jmespath_functions/src/extract_data_from_builtin_envelope.py"
```
@@ -116,7 +116,7 @@ This sample will deserialize the JSON string within the `body` key before [Idemp
=== "powertools_json_idempotency_jmespath.py"
- ```python hl_lines="12"
+ ```python hl_lines="16"
--8<-- "examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py"
```
@@ -185,7 +185,7 @@ Here is an example of how to decompress messages using [snappy](https://github.c
=== "powertools_custom_jmespath_function.py"
- ```python hl_lines="8 11 14-15 20 31 36 38 40"
+ ```python hl_lines="9 14 17-18 23 34 39 41 43"
--8<-- "examples/jmespath_functions/src/powertools_custom_jmespath_function.py"
```
diff --git a/docs/utilities/middleware_factory.md b/docs/utilities/middleware_factory.md
index 70157ca1286..4d125b3c006 100644
--- a/docs/utilities/middleware_factory.md
+++ b/docs/utilities/middleware_factory.md
@@ -30,7 +30,7 @@ You can create your own middleware using `lambda_handler_decorator`. The decorat
### Middleware with before logic
=== "getting_started_middleware_before_logic_function.py"
- ```python hl_lines="5 23 24 29 30 32 37 38"
+ ```python hl_lines="5 26 27 32 33 35 40 41"
--8<-- "examples/middleware_factory/src/getting_started_middleware_before_logic_function.py"
```
@@ -58,7 +58,7 @@ You can create your own middleware using `lambda_handler_decorator`. The decorat
You can also have your own keyword arguments after the mandatory arguments.
=== "getting_started_middleware_with_params_function.py"
- ```python hl_lines="6 27 28 29 33 49"
+ ```python hl_lines="6 30 31 32 36 52"
--8<-- "examples/middleware_factory/src/getting_started_middleware_with_params_function.py"
```
diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md
index e97395ae56c..cdcb949d28a 100644
--- a/docs/utilities/parser.md
+++ b/docs/utilities/parser.md
@@ -171,7 +171,7 @@ Parser comes with the following built-in models:
| **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload |
| **KafkaMskEventModel** | Lambda Event Source payload for AWS MSK payload |
-### extending built-in models
+### Extending built-in models
You can extend them to include your own models, and yet have all other known fields parsed along the way.
@@ -236,6 +236,20 @@ for order_item in ret.detail.items:
3. Defined how part of our EventBridge event should look like by overriding `detail` key within our `OrderEventModel`
4. Parser parsed the original event against `OrderEventModel`
+???+ tip
+ When extending a `string` field containing JSON, you need to wrap the field
+ with [Pydantic's Json Type](https://pydantic-docs.helpmanual.io/usage/types/#json-type):
+
+ ```python hl_lines="14 18-19"
+ --8<-- "examples/parser/src/extending_built_in_models_with_json_mypy.py"
+ ```
+
+ Alternatively, you could use a [Pydantic validator](https://pydantic-docs.helpmanual.io/usage/validators/) to transform the JSON string into a dict before the mapping:
+
+ ```python hl_lines="18-20 24-25"
+ --8<-- "examples/parser/src/extending_built_in_models_with_json_validator.py"
+ ```
+
## Envelopes
When trying to parse your payloads wrapped in a known structure, you might encounter the following situations:
diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md
index c9cd5813086..3b61fececd4 100644
--- a/docs/utilities/validation.md
+++ b/docs/utilities/validation.md
@@ -91,7 +91,7 @@ Here is a sample custom EventBridge event, where we only validate what's inside
=== "getting_started_validator_unwrapping_function.py"
- ```python hl_lines="2 6 12"
+ ```python hl_lines="2 8 14"
--8<-- "examples/validation/src/getting_started_validator_unwrapping_function.py"
```
@@ -117,7 +117,7 @@ We provide built-in envelopes to easily extract the payload from popular event s
=== "unwrapping_popular_event_source_function.py"
- ```python hl_lines="2 7 12"
+ ```python hl_lines="2 9 14"
--8<-- "examples/validation/src/unwrapping_popular_event_source_function.py"
```
diff --git a/examples/event_handler_graphql/src/assert_async_graphql_response.py b/examples/event_handler_graphql/src/assert_async_graphql_response.py
index e85816f2ca1..bb1b429c43c 100644
--- a/examples/event_handler_graphql/src/assert_async_graphql_response.py
+++ b/examples/event_handler_graphql/src/assert_async_graphql_response.py
@@ -4,7 +4,10 @@
from typing import List
import pytest
-from assert_async_graphql_response_module import Todo, app # instance of AppSyncResolver
+from assert_async_graphql_response_module import ( # instance of AppSyncResolver
+ Todo,
+ app,
+)
@pytest.fixture
diff --git a/examples/event_handler_graphql/src/custom_models.py b/examples/event_handler_graphql/src/custom_models.py
index 594ef5ee248..6d82e1ba9be 100644
--- a/examples/event_handler_graphql/src/custom_models.py
+++ b/examples/event_handler_graphql/src/custom_models.py
@@ -11,7 +11,9 @@
from aws_lambda_powertools.event_handler import AppSyncResolver
from aws_lambda_powertools.logging import correlation_paths
from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
-from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent
+from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import (
+ AppSyncResolverEvent,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
tracer = Tracer()
diff --git a/examples/event_handler_graphql/src/split_operation_append_context.py b/examples/event_handler_graphql/src/split_operation_append_context.py
new file mode 100644
index 00000000000..6cd28c259f0
--- /dev/null
+++ b/examples/event_handler_graphql/src/split_operation_append_context.py
@@ -0,0 +1,18 @@
+import split_operation_append_context_module
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+app.include_router(split_operation_append_context_module.router)
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ app.append_context(is_admin=True) # arbitrary number of key=value data
+ return app.resolve(event, context)
diff --git a/examples/event_handler_graphql/src/split_operation_append_context_module.py b/examples/event_handler_graphql/src/split_operation_append_context_module.py
new file mode 100644
index 00000000000..e30e345c313
--- /dev/null
+++ b/examples/event_handler_graphql/src/split_operation_append_context_module.py
@@ -0,0 +1,30 @@
+import sys
+
+if sys.version_info >= (3, 8):
+ from typing import TypedDict
+else:
+ from typing_extensions import TypedDict
+
+from typing import List
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler.appsync import Router
+
+tracer = Tracer()
+logger = Logger()
+router = Router()
+
+
+class Location(TypedDict, total=False):
+ id: str # noqa AA03 VNE003, required due to GraphQL Schema
+ name: str
+ description: str
+ address: str
+
+
+@router.resolver(field_name="listLocations")
+@router.resolver(field_name="locations")
+@tracer.capture_method
+def get_locations(name: str, description: str = "") -> List[Location]: # match GraphQL Query arguments
+ is_admin: bool = router.context.get("is_admin", False)
+ return [{"name": name, "description": description}] if is_admin else []
diff --git a/examples/event_handler_rest/src/binary_responses.py b/examples/event_handler_rest/src/binary_responses.py
index d56eda1afe8..f91dc879402 100644
--- a/examples/event_handler_rest/src/binary_responses.py
+++ b/examples/event_handler_rest/src/binary_responses.py
@@ -2,7 +2,10 @@
from pathlib import Path
from aws_lambda_powertools import Logger, Tracer
-from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response
+from aws_lambda_powertools.event_handler.api_gateway import (
+ APIGatewayRestResolver,
+ Response,
+)
from aws_lambda_powertools.logging import correlation_paths
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/examples/event_handler_rest/src/exception_handling.py b/examples/event_handler_rest/src/exception_handling.py
index 89a31e60bf1..ea325bd6dc1 100644
--- a/examples/event_handler_rest/src/exception_handling.py
+++ b/examples/event_handler_rest/src/exception_handling.py
@@ -1,7 +1,11 @@
import requests
from aws_lambda_powertools import Logger, Tracer
-from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types
+from aws_lambda_powertools.event_handler import (
+ APIGatewayRestResolver,
+ Response,
+ content_types,
+)
from aws_lambda_powertools.logging import correlation_paths
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/examples/event_handler_rest/src/fine_grained_responses.py b/examples/event_handler_rest/src/fine_grained_responses.py
index 3e477160307..15c70cd282b 100644
--- a/examples/event_handler_rest/src/fine_grained_responses.py
+++ b/examples/event_handler_rest/src/fine_grained_responses.py
@@ -4,7 +4,11 @@
import requests
from aws_lambda_powertools import Logger, Tracer
-from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types
+from aws_lambda_powertools.event_handler import (
+ APIGatewayRestResolver,
+ Response,
+ content_types,
+)
from aws_lambda_powertools.logging import correlation_paths
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/examples/event_handler_rest/src/not_found_routes.py b/examples/event_handler_rest/src/not_found_routes.py
index 889880292c0..1bf378e5e62 100644
--- a/examples/event_handler_rest/src/not_found_routes.py
+++ b/examples/event_handler_rest/src/not_found_routes.py
@@ -1,7 +1,11 @@
import requests
from aws_lambda_powertools import Logger, Tracer
-from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types
+from aws_lambda_powertools.event_handler import (
+ APIGatewayRestResolver,
+ Response,
+ content_types,
+)
from aws_lambda_powertools.event_handler.exceptions import NotFoundError
from aws_lambda_powertools.logging import correlation_paths
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/examples/event_handler_rest/src/split_route_append_context.py b/examples/event_handler_rest/src/split_route_append_context.py
new file mode 100644
index 00000000000..dd012c61db8
--- /dev/null
+++ b/examples/event_handler_rest/src/split_route_append_context.py
@@ -0,0 +1,19 @@
+import split_route_append_context_module
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+app.include_router(split_route_append_context_module.router)
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ app.append_context(is_admin=True) # arbitrary number of key=value data
+ return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/split_route_append_context_module.py b/examples/event_handler_rest/src/split_route_append_context_module.py
new file mode 100644
index 00000000000..0b9a0cd5fa0
--- /dev/null
+++ b/examples/event_handler_rest/src/split_route_append_context_module.py
@@ -0,0 +1,25 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Tracer
+from aws_lambda_powertools.event_handler.api_gateway import Router
+
+tracer = Tracer()
+router = Router()
+
+endpoint = "https://jsonplaceholder.typicode.com/todos"
+
+
+@router.get("/todos")
+@tracer.capture_method
+def get_todos():
+ is_admin: bool = router.context.get("is_admin", False)
+ todos = {}
+
+ if is_admin:
+ todos: Response = requests.get(endpoint)
+ todos.raise_for_status()
+ todos = todos.json()[:10]
+
+ # for brevity, we'll limit to the first 10 only
+ return {"todos": todos}
diff --git a/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py b/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py
index 53c230e1b9b..31ae6cf268c 100644
--- a/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py
+++ b/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py
@@ -1,4 +1,7 @@
-from aws_lambda_powertools.utilities.jmespath_utils import envelopes, extract_data_from_envelope
+from aws_lambda_powertools.utilities.jmespath_utils import (
+ envelopes,
+ extract_data_from_envelope,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/examples/jmespath_functions/src/powertools_custom_jmespath_function.py b/examples/jmespath_functions/src/powertools_custom_jmespath_function.py
index 71fdecd0db2..cd7b85b8115 100644
--- a/examples/jmespath_functions/src/powertools_custom_jmespath_function.py
+++ b/examples/jmespath_functions/src/powertools_custom_jmespath_function.py
@@ -5,7 +5,10 @@
from jmespath.exceptions import JMESPathTypeError
from jmespath.functions import signature
-from aws_lambda_powertools.utilities.jmespath_utils import PowertoolsFunctions, extract_data_from_envelope
+from aws_lambda_powertools.utilities.jmespath_utils import (
+ PowertoolsFunctions,
+ extract_data_from_envelope,
+)
class CustomFunctions(PowertoolsFunctions):
diff --git a/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py b/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py
index aaf5724b54b..776d5485741 100644
--- a/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py
+++ b/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py
@@ -3,7 +3,11 @@
import requests
-from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent
+from aws_lambda_powertools.utilities.idempotency import (
+ DynamoDBPersistenceLayer,
+ IdempotencyConfig,
+ idempotent,
+)
persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
diff --git a/examples/middleware_factory/src/getting_started_middleware_before_logic_function.py b/examples/middleware_factory/src/getting_started_middleware_before_logic_function.py
index 7d5ee035e7b..451e506391a 100644
--- a/examples/middleware_factory/src/getting_started_middleware_before_logic_function.py
+++ b/examples/middleware_factory/src/getting_started_middleware_before_logic_function.py
@@ -3,7 +3,10 @@
from uuid import uuid4
from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
-from aws_lambda_powertools.utilities.jmespath_utils import envelopes, extract_data_from_envelope
+from aws_lambda_powertools.utilities.jmespath_utils import (
+ envelopes,
+ extract_data_from_envelope,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/examples/middleware_factory/src/getting_started_middleware_with_params_function.py b/examples/middleware_factory/src/getting_started_middleware_with_params_function.py
index ce800e9162f..ebede4efe36 100644
--- a/examples/middleware_factory/src/getting_started_middleware_with_params_function.py
+++ b/examples/middleware_factory/src/getting_started_middleware_with_params_function.py
@@ -4,7 +4,10 @@
from uuid import uuid4
from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
-from aws_lambda_powertools.utilities.jmespath_utils import envelopes, extract_data_from_envelope
+from aws_lambda_powertools.utilities.jmespath_utils import (
+ envelopes,
+ extract_data_from_envelope,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/examples/parser/src/extending_built_in_models_with_json_mypy.py b/examples/parser/src/extending_built_in_models_with_json_mypy.py
new file mode 100644
index 00000000000..80314a814ce
--- /dev/null
+++ b/examples/parser/src/extending_built_in_models_with_json_mypy.py
@@ -0,0 +1,21 @@
+from pydantic import BaseModel, Json
+
+from aws_lambda_powertools.utilities.parser import event_parser
+from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventV2Model
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class CancelOrder(BaseModel):
+ order_id: int
+ reason: str
+
+
+class CancelOrderModel(APIGatewayProxyEventV2Model):
+ body: Json[CancelOrder] # type: ignore[type-arg]
+
+
+@event_parser(model=CancelOrderModel)
+def handler(event: CancelOrderModel, context: LambdaContext):
+ cancel_order: CancelOrder = event.body # type: ignore[assignment]
+
+ assert cancel_order.order_id is not None
diff --git a/examples/parser/src/extending_built_in_models_with_json_validator.py b/examples/parser/src/extending_built_in_models_with_json_validator.py
new file mode 100644
index 00000000000..acd4f3fc825
--- /dev/null
+++ b/examples/parser/src/extending_built_in_models_with_json_validator.py
@@ -0,0 +1,27 @@
+import json
+
+from pydantic import BaseModel, validator
+
+from aws_lambda_powertools.utilities.parser import event_parser
+from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventV2Model
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class CancelOrder(BaseModel):
+ order_id: int
+ reason: str
+
+
+class CancelOrderModel(APIGatewayProxyEventV2Model):
+ body: CancelOrder # type: ignore[assignment]
+
+ @validator("body", pre=True)
+ def transform_body_to_dict(cls, value: str):
+ return json.loads(value)
+
+
+@event_parser(model=CancelOrderModel)
+def handler(event: CancelOrderModel, context: LambdaContext):
+ cancel_order: CancelOrder = event.body
+
+ assert cancel_order.order_id is not None
diff --git a/examples/validation/src/getting_started_validator_unwrapping_function.py b/examples/validation/src/getting_started_validator_unwrapping_function.py
index 96c66a6f2d3..c1b48fd3a81 100644
--- a/examples/validation/src/getting_started_validator_unwrapping_function.py
+++ b/examples/validation/src/getting_started_validator_unwrapping_function.py
@@ -1,7 +1,9 @@
import boto3
import getting_started_validator_unwrapping_schema as schemas
-from aws_lambda_powertools.utilities.data_classes.event_bridge_event import EventBridgeEvent
+from aws_lambda_powertools.utilities.data_classes.event_bridge_event import (
+ EventBridgeEvent,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
from aws_lambda_powertools.utilities.validation import validator
diff --git a/examples/validation/src/unwrapping_popular_event_source_function.py b/examples/validation/src/unwrapping_popular_event_source_function.py
index 8afbb5c727f..a0c0c1a7000 100644
--- a/examples/validation/src/unwrapping_popular_event_source_function.py
+++ b/examples/validation/src/unwrapping_popular_event_source_function.py
@@ -2,7 +2,9 @@
import unwrapping_popular_event_source_schema as schemas
from botocore.exceptions import ClientError
-from aws_lambda_powertools.utilities.data_classes.event_bridge_event import EventBridgeEvent
+from aws_lambda_powertools.utilities.data_classes.event_bridge_event import (
+ EventBridgeEvent,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
from aws_lambda_powertools.utilities.validation import envelopes, validator
diff --git a/poetry.lock b/poetry.lock
index 49b4ade1195..e2ef496c5dc 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -127,7 +127,7 @@ python-versions = "*"
attrs = ">=17.3"
[package.extras]
-dev = ["bumpversion", "coverage", "flake8", "hypothesis", "pendulum", "pytest", "sphinx", "tox", "watchdog", "wheel"]
+dev = ["Sphinx", "bumpversion", "coverage", "flake8", "hypothesis", "pendulum", "pytest", "tox", "watchdog", "wheel"]
[[package]]
name = "cattrs"
@@ -241,8 +241,8 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"]
[[package]]
name = "email-validator"
-version = "1.2.1"
-description = "A robust email syntax and deliverability validation library."
+version = "1.3.0"
+description = "A robust email address syntax and deliverability validation library."
category = "main"
optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
@@ -283,7 +283,7 @@ testing = ["pre-commit"]
[[package]]
name = "fastjsonschema"
-version = "2.16.1"
+version = "2.16.2"
description = "Fastest Python implementation of JSON schema"
category = "main"
optional = false
@@ -320,7 +320,7 @@ pyflakes = ">=2.3.0,<2.4.0"
[[package]]
name = "flake8-bugbear"
-version = "22.9.11"
+version = "22.9.23"
description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
category = "dev"
optional = false
@@ -384,6 +384,7 @@ python-versions = ">=3.6,<4.0"
attrs = "*"
eradicate = ">=2.0,<3.0"
flake8 = ">=3.5,<6"
+setuptools = "*"
[[package]]
name = "flake8-fixme"
@@ -416,6 +417,9 @@ category = "dev"
optional = false
python-versions = "*"
+[package.dependencies]
+setuptools = "*"
+
[[package]]
name = "future"
version = "0.18.2"
@@ -564,8 +568,8 @@ python-dateutil = "*"
typing-extensions = ">=3.7,<5.0"
[[package]]
-name = "mako"
-version = "1.2.2"
+name = "Mako"
+version = "1.2.3"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
category = "dev"
optional = false
@@ -576,7 +580,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
MarkupSafe = ">=0.9.2"
[package.extras]
-babel = ["babel"]
+babel = ["Babel"]
lingua = ["lingua"]
testing = ["pytest"]
@@ -688,7 +692,7 @@ mkdocs = ">=0.17"
[[package]]
name = "mkdocs-material"
-version = "8.5.1"
+version = "8.5.4"
description = "Documentation that simply works"
category = "dev"
optional = false
@@ -798,8 +802,8 @@ typing-extensions = ">=4.1.0"
[[package]]
name = "mypy-boto3-s3"
-version = "1.24.36.post1"
-description = "Type annotations for boto3.S3 1.24.36 service generated with mypy-boto3-builder 7.10.0"
+version = "1.24.76"
+description = "Type annotations for boto3.S3 1.24.76 service generated with mypy-boto3-builder 7.11.9"
category = "dev"
optional = false
python-versions = ">=3.7"
@@ -809,8 +813,8 @@ typing-extensions = ">=4.1.0"
[[package]]
name = "mypy-boto3-secretsmanager"
-version = "1.24.54"
-description = "Type annotations for boto3.SecretsManager 1.24.54 service generated with mypy-boto3-builder 7.11.6"
+version = "1.24.83"
+description = "Type annotations for boto3.SecretsManager 1.24.83 service generated with mypy-boto3-builder 7.11.10"
category = "dev"
optional = false
python-versions = ">=3.7"
@@ -820,8 +824,8 @@ typing-extensions = ">=4.1.0"
[[package]]
name = "mypy-boto3-ssm"
-version = "1.24.69"
-description = "Type annotations for boto3.SSM 1.24.69 service generated with mypy-boto3-builder 7.11.8"
+version = "1.24.81"
+description = "Type annotations for boto3.SSM 1.24.81 service generated with mypy-boto3-builder 7.11.9"
category = "dev"
optional = false
python-versions = ">=3.7"
@@ -1058,7 +1062,7 @@ histogram = ["pygal", "pygaljs"]
[[package]]
name = "pytest-cov"
-version = "3.0.0"
+version = "4.0.0"
description = "Pytest plugin for measuring coverage."
category = "dev"
optional = false
@@ -1233,6 +1237,18 @@ botocore = ">=1.12.36,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
+[[package]]
+name = "setuptools"
+version = "59.6.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-inline-tabs", "sphinxcontrib-towncrier"]
+testing = ["flake8-2020", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "paver", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-virtualenv (>=1.2.7)", "pytest-xdist", "sphinx", "virtualenv (>=13.0.0)", "wheel"]
+
[[package]]
name = "six"
version = "1.16.0"
@@ -1279,7 +1295,7 @@ python-versions = ">=3.6"
[[package]]
name = "types-requests"
-version = "2.28.10"
+version = "2.28.11.1"
description = "Typing stubs for requests"
category = "dev"
optional = false
@@ -1367,14 +1383,28 @@ pydantic = ["pydantic", "email-validator"]
[metadata]
lock-version = "1.1"
python-versions = "^3.6.2"
-content-hash = "a2f26ad6d2e0860b1543ddea2a3d46002a2d980797ec5c411333de54776eed89"
+content-hash = "bfa6597ca1a4b8e1199f55f2404e7caee56d30274072624165533b39f726e0a2"
[metadata.files]
-atomicwrites = []
-attrs = []
-aws-cdk-lib = []
-aws-xray-sdk = []
-bandit = []
+atomicwrites = [
+ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"},
+]
+attrs = [
+ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
+ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
+]
+aws-cdk-lib = [
+ {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"},
+ {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = "sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"},
+]
+aws-xray-sdk = [
+ {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"},
+ {file = "aws_xray_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:7551e81a796e1a5471ebe84844c40e8edf7c218db33506d046fec61f7495eda4"},
+]
+bandit = [
+ {file = "bandit-1.7.1-py3-none-any.whl", hash = "sha256:f5acd838e59c038a159b5c621cf0f8270b279e884eadd7b782d7491c02add0d4"},
+ {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"},
+]
black = [
{file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"},
{file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"},
@@ -1408,7 +1438,12 @@ botocore = [
{file = "botocore-1.26.10-py3-none-any.whl", hash = "sha256:8a4a984bf901ccefe40037da11ba2abd1ddbcb3b490a492b7f218509c99fc12f"},
{file = "botocore-1.26.10.tar.gz", hash = "sha256:5df2cf7ebe34377470172bd0bbc582cf98c5cbd02da0909a14e9e2885ab3ae9c"},
]
-cattrs = []
+cattrs = [
+ {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"},
+ {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"},
+ {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"},
+ {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"},
+]
certifi = [
{file = "certifi-2022.6.15.1-py3-none-any.whl", hash = "sha256:43dadad18a7f168740e66944e4fa82c6611848ff9056ad910f8f7a3e46ab89e0"},
{file = "certifi-2022.6.15.1.tar.gz", hash = "sha256:cffdcd380919da6137f76633531a5817e3a9f268575c128249fb637e4f9e73fb"},
@@ -1421,16 +1456,79 @@ click = [
{file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"},
{file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"},
]
-colorama = []
-constructs = []
-coverage = []
-dataclasses = []
-decorator = []
+colorama = [
+ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
+ {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
+]
+constructs = [
+ {file = "constructs-10.1.1-py3-none-any.whl", hash = "sha256:c1f3deb196f54e070ded3c92c4339f73ef2b6022d35fb34908c0ebfa7ef8a640"},
+ {file = "constructs-10.1.1.tar.gz", hash = "sha256:6ce0dd1352367237b5d7c51a25740482c852735d2a5e067c536acc1657f39ea5"},
+]
+coverage = [
+ {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"},
+ {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"},
+ {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"},
+ {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"},
+ {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"},
+ {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"},
+ {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"},
+ {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"},
+ {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"},
+ {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"},
+ {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"},
+ {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"},
+ {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"},
+ {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"},
+ {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"},
+ {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"},
+ {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"},
+ {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"},
+ {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"},
+ {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"},
+ {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"},
+ {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"},
+ {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"},
+ {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"},
+ {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"},
+ {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"},
+ {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"},
+ {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"},
+ {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"},
+ {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"},
+ {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"},
+ {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"},
+ {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"},
+ {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"},
+ {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"},
+ {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"},
+ {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"},
+ {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"},
+ {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"},
+ {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"},
+ {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"},
+ {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"},
+ {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"},
+ {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"},
+ {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"},
+ {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"},
+ {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"},
+]
+dataclasses = [
+ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"},
+ {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"},
+]
+decorator = [
+ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
+ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
+]
dnspython = [
{file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"},
{file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"},
]
-email-validator = []
+email-validator = [
+ {file = "email_validator-1.3.0-py2.py3-none-any.whl", hash = "sha256:816073f2a7cffef786b29928f58ec16cdac42710a53bb18aa94317e3e145ec5c"},
+ {file = "email_validator-1.3.0.tar.gz", hash = "sha256:553a66f8be2ec2dea641ae1d3f29017ab89e9d603d4a25cdaac39eefa283d769"},
+]
eradicate = [
{file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"},
{file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"},
@@ -1439,8 +1537,14 @@ exceptiongroup = [
{file = "exceptiongroup-1.0.0rc9-py3-none-any.whl", hash = "sha256:2e3c3fc1538a094aab74fad52d6c33fc94de3dfee3ee01f187c0e0c72aec5337"},
{file = "exceptiongroup-1.0.0rc9.tar.gz", hash = "sha256:9086a4a21ef9b31c72181c77c040a074ba0889ee56a7b289ff0afb0d97655f96"},
]
-execnet = []
-fastjsonschema = []
+execnet = [
+ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
+ {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
+]
+fastjsonschema = [
+ {file = "fastjsonschema-2.16.2-py3-none-any.whl", hash = "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c"},
+ {file = "fastjsonschema-2.16.2.tar.gz", hash = "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18"},
+]
filelock = [
{file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"},
{file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"},
@@ -1450,29 +1554,51 @@ flake8 = [
{file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
]
flake8-bugbear = [
- {file = "flake8-bugbear-22.9.11.tar.gz", hash = "sha256:39236c0e97160d1ab05d9f87422173d16e925a6220b3635bfc4aee766bf8194a"},
- {file = "flake8_bugbear-22.9.11-py3-none-any.whl", hash = "sha256:e74350a4cfc670e184f3433c223b1e7378f1cf8345ded6c8f12ac1a50c5df22b"},
+ {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"},
+ {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"},
+]
+flake8-builtins = [
+ {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"},
+ {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"},
+]
+flake8-comprehensions = [
+ {file = "flake8-comprehensions-3.7.0.tar.gz", hash = "sha256:6b3218b2dde8ac5959c6476cde8f41a79e823c22feb656be2710cd2a3232cef9"},
+ {file = "flake8_comprehensions-3.7.0-py3-none-any.whl", hash = "sha256:a5d7aea6315bbbd6fbcb2b4e80bff6a54d1600155e26236e555d0c6fe1d62522"},
+]
+flake8-debugger = [
+ {file = "flake8-debugger-4.0.0.tar.gz", hash = "sha256:e43dc777f7db1481db473210101ec2df2bd39a45b149d7218a618e954177eda6"},
+ {file = "flake8_debugger-4.0.0-py3-none-any.whl", hash = "sha256:82e64faa72e18d1bdd0000407502ebb8ecffa7bc027c62b9d4110ce27c091032"},
]
-flake8-builtins = []
-flake8-comprehensions = []
-flake8-debugger = []
flake8-eradicate = [
{file = "flake8-eradicate-1.3.0.tar.gz", hash = "sha256:e4c98f00d17dc8653e3388cac2624cd81e9735de2fd4a8dcf99029633ebd7a63"},
{file = "flake8_eradicate-1.3.0-py3-none-any.whl", hash = "sha256:85a71e0c5f4e07f7c6c5fec520483561fd6bd295417d622855bdeade99242e3d"},
]
-flake8-fixme = []
+flake8-fixme = [
+ {file = "flake8-fixme-1.1.1.tar.gz", hash = "sha256:50cade07d27a4c30d4f12351478df87339e67640c83041b664724bda6d16f33a"},
+ {file = "flake8_fixme-1.1.1-py2.py3-none-any.whl", hash = "sha256:226a6f2ef916730899f29ac140bed5d4a17e5aba79f00a0e3ae1eff1997cb1ac"},
+]
flake8-isort = [
{file = "flake8-isort-4.2.0.tar.gz", hash = "sha256:26571500cd54976bbc0cf1006ffbcd1a68dd102f816b7a1051b219616ba9fee0"},
{file = "flake8_isort-4.2.0-py3-none-any.whl", hash = "sha256:5b87630fb3719bf4c1833fd11e0d9534f43efdeba524863e15d8f14a7ef6adbf"},
]
-flake8-variables-names = []
-future = []
+flake8-variables-names = [
+ {file = "flake8_variables_names-0.0.4.tar.gz", hash = "sha256:d6fa0571a807c72940b5773827c5760421ea6f8206595ff0a8ecfa01e42bf2cf"},
+]
+future = [
+ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
+]
ghp-import = [
{file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"},
{file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"},
]
-gitdb = []
-gitpython = []
+gitdb = [
+ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
+ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
+]
+gitpython = [
+ {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"},
+ {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"},
+]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
@@ -1481,36 +1607,165 @@ importlib-metadata = [
{file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"},
{file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"},
]
-importlib-resources = []
-iniconfig = []
-isort = []
-jinja2 = []
-jmespath = []
-jsii = []
-mako = [
- {file = "Mako-1.2.2-py3-none-any.whl", hash = "sha256:8efcb8004681b5f71d09c983ad5a9e6f5c40601a6ec469148753292abc0da534"},
- {file = "Mako-1.2.2.tar.gz", hash = "sha256:3724869b363ba630a272a5f89f68c070352137b8fd1757650017b7e06fda163f"},
-]
-mando = []
+importlib-resources = [
+ {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"},
+ {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"},
+]
+iniconfig = [
+ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
+ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
+]
+isort = [
+ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
+ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
+]
+jinja2 = [
+ {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"},
+ {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"},
+]
+jmespath = [
+ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"},
+ {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"},
+]
+jsii = [
+ {file = "jsii-1.57.0-py3-none-any.whl", hash = "sha256:4888091986a9ed8d50b042cc9c35a9564dd54c19e78adb890bf06d9ffac1b325"},
+ {file = "jsii-1.57.0.tar.gz", hash = "sha256:ff7a3c51c1a653dd8a4342043b5f8e40b928bc617e3141e0d5d66175d22a754b"},
+]
+Mako = [
+ {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"},
+ {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"},
+]
+mando = [
+ {file = "mando-0.6.4-py2.py3-none-any.whl", hash = "sha256:4ce09faec7e5192ffc3c57830e26acba0fd6cd11e1ee81af0d4df0657463bd1c"},
+ {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"},
+]
markdown = [
{file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"},
{file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"},
]
-markupsafe = []
-mccabe = []
-mergedeep = []
-mike = []
+markupsafe = [
+ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
+ {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
+]
+mccabe = [
+ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
+ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
+]
+mergedeep = [
+ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"},
+ {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"},
+]
+mike = [
+ {file = "mike-0.6.0-py3-none-any.whl", hash = "sha256:cef9b9c803ff5c3fbb410f51f5ceb00902a9fe16d9fabd93b69c65cf481ab5a1"},
+ {file = "mike-0.6.0.tar.gz", hash = "sha256:6d6239de2a60d733da2f34617e9b9a14c4b5437423b47e524f14dc96d6ce5f2f"},
+]
mkdocs = [
{file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"},
{file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"},
]
-mkdocs-git-revision-date-plugin = []
+mkdocs-git-revision-date-plugin = [
+ {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"},
+]
mkdocs-material = [
- {file = "mkdocs-material-8.5.1.tar.gz", hash = "sha256:2daf604d00d554d5496c02b4c4d14dfa57dd689c90f639da5020601baef4b235"},
- {file = "mkdocs_material-8.5.1-py2.py3-none-any.whl", hash = "sha256:79e9b65e481edb539eda6a3a939a0e02609a5c54afc315c3b05f57ff40db3188"},
+ {file = "mkdocs_material-8.5.4-py3-none-any.whl", hash = "sha256:aec2f0f2143109f8388aadf76e6fff749a2b74ebe730d0f674c65b53da89d19d"},
+ {file = "mkdocs_material-8.5.4.tar.gz", hash = "sha256:70dc47820d4765b77968b9119f2957d09b4d8d328d950bee4544ff224d5c7b36"},
+]
+mkdocs-material-extensions = [
+ {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"},
+ {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"},
+]
+mypy = [
+ {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"},
+ {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"},
+ {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"},
+ {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"},
+ {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"},
+ {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"},
+ {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"},
+ {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"},
+ {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"},
+ {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"},
+ {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"},
+ {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"},
+ {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"},
+ {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"},
+ {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"},
+ {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"},
+ {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"},
+ {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"},
+ {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"},
+ {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"},
+ {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"},
+ {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"},
+ {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"},
]
-mkdocs-material-extensions = []
-mypy = []
mypy-boto3-appconfig = [
{file = "mypy-boto3-appconfig-1.24.36.post1.tar.gz", hash = "sha256:e1916b3754915cb411ef977083500e1f30f81f7b3aea6ff5eed1cec91944dea6"},
{file = "mypy_boto3_appconfig-1.24.36.post1-py3-none-any.whl", hash = "sha256:a5dbe549dbebf4bc7a6cfcbfa9dff89ceb4983c042b785763ee656504bdb49f6"},
@@ -1536,34 +1791,60 @@ mypy-boto3-logs = [
{file = "mypy_boto3_logs-1.24.36.post1-py3-none-any.whl", hash = "sha256:f96257ec06099bfda1ce5f35b410e7fb93fb601bc312e8d7a09b13adaefd23f0"},
]
mypy-boto3-s3 = [
- {file = "mypy-boto3-s3-1.24.36.post1.tar.gz", hash = "sha256:3bd7e06f9ade5059eae2181d7a9f1a41e7fa807ad3e94c01c9901838e87e0abe"},
- {file = "mypy_boto3_s3-1.24.36.post1-py3-none-any.whl", hash = "sha256:30ae59b33c55f8b7b693170f9519ea5b91a2fbf31a73de79cdef57a27d784e5a"},
+ {file = "mypy-boto3-s3-1.24.76.tar.gz", hash = "sha256:7225362fd6f0d894b521615fe124955856ff8f1ecdae5375ac20b334beb4ad9c"},
+ {file = "mypy_boto3_s3-1.24.76-py3-none-any.whl", hash = "sha256:fd71cd3894a4da0dfcf1372984e667bfea8df930a1d27906206b6bb4d82ca418"},
]
mypy-boto3-secretsmanager = [
- {file = "mypy-boto3-secretsmanager-1.24.54.tar.gz", hash = "sha256:a846b79f86e218a794dbc858c08290bb6aebffa180c80cf0a463c32a04621ff1"},
- {file = "mypy_boto3_secretsmanager-1.24.54-py3-none-any.whl", hash = "sha256:b89c9a0ff65a8ab2c4e4d3f6e721a0477b7d0fec246ffc08e4378420eb50b4d0"},
+ {file = "mypy-boto3-secretsmanager-1.24.83.tar.gz", hash = "sha256:e39b55ffa05310832544bc4212c5b6fad4f97bf9f60f05827385d266f481a7ff"},
+ {file = "mypy_boto3_secretsmanager-1.24.83-py3-none-any.whl", hash = "sha256:9ed3ec38a6c05961cb39a2d9fb891441d4cf22c63e34a6998fbd3d28ba290d9a"},
]
mypy-boto3-ssm = [
- {file = "mypy-boto3-ssm-1.24.69.tar.gz", hash = "sha256:e084dc97ff946ef46fb36366db5fefaf948e761ed9488f91e281485e07885ad1"},
- {file = "mypy_boto3_ssm-1.24.69-py3-none-any.whl", hash = "sha256:066af3da69da431353db4862a3d8f49ad8021be122e80b7333a6746e39d35012"},
+ {file = "mypy-boto3-ssm-1.24.81.tar.gz", hash = "sha256:2b3167faa868442e43f0c6065fac8549762aafc967e487aae2d9e15c5bad20c3"},
+ {file = "mypy_boto3_ssm-1.24.81-py3-none-any.whl", hash = "sha256:a50fe448f3c18f76255e15878e21020001ec04a85b42996db721d9b89770ff11"},
]
mypy-boto3-xray = [
{file = "mypy-boto3-xray-1.24.36.post1.tar.gz", hash = "sha256:104f1ecf7f1f6278c582201e71a7ab64843d3a3fdc8f23295cf68788cc77e9bb"},
{file = "mypy_boto3_xray-1.24.36.post1-py3-none-any.whl", hash = "sha256:97b9f0686c717c8be99ac06cb52febaf71712b4e4cd0b61ed2eb5ed012a9b5fd"},
]
-mypy-extensions = []
-packaging = []
-pathspec = []
+mypy-extensions = [
+ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
+packaging = [
+ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
+]
+pathspec = [
+ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
+ {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
+]
pbr = [
{file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"},
{file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"},
]
-pdoc3 = []
-platformdirs = []
-pluggy = []
-publication = []
-py = []
-py-cpuinfo = []
+pdoc3 = [
+ {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"},
+ {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"},
+]
+platformdirs = [
+ {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"},
+ {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"},
+]
+pluggy = [
+ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
+ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+]
+publication = [
+ {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"},
+ {file = "publication-0.0.3.tar.gz", hash = "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"},
+]
+py = [
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
+py-cpuinfo = [
+ {file = "py-cpuinfo-8.0.0.tar.gz", hash = "sha256:5f269be0e08e33fd959de96b34cd4aeeeacac014dd8305f70eb28d06de2345c5"},
+]
pycodestyle = [
{file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
{file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
@@ -1613,19 +1894,46 @@ pygments = [
{file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"},
{file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"},
]
-pymdown-extensions = []
+pymdown-extensions = [
+ {file = "pymdown_extensions-9.5-py3-none-any.whl", hash = "sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"},
+ {file = "pymdown_extensions-9.5.tar.gz", hash = "sha256:3ef2d998c0d5fa7eb09291926d90d69391283561cf6306f85cd588a5eb5befa0"},
+]
pyparsing = [
{file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
{file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
]
-pytest = []
-pytest-asyncio = []
-pytest-benchmark = []
-pytest-cov = []
-pytest-forked = []
-pytest-mock = []
-pytest-xdist = []
-python-dateutil = []
+pytest = [
+ {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"},
+ {file = "pytest-7.0.1.tar.gz", hash = "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"},
+]
+pytest-asyncio = [
+ {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"},
+ {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"},
+]
+pytest-benchmark = [
+ {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"},
+ {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"},
+]
+pytest-cov = [
+ {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"},
+ {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"},
+]
+pytest-forked = [
+ {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"},
+ {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"},
+]
+pytest-mock = [
+ {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"},
+ {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"},
+]
+pytest-xdist = [
+ {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"},
+ {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"},
+]
+python-dateutil = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
python-snappy = [
{file = "python-snappy-0.6.1.tar.gz", hash = "sha256:b6a107ab06206acc5359d4c5632bd9b22d448702a79b3169b0c62e0fb808bb2a"},
{file = "python_snappy-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b7f920eaf46ebf41bd26f9df51c160d40f9e00b7b48471c3438cb8d027f7fb9b"},
@@ -1684,6 +1992,13 @@ pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
+ {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
+ {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
@@ -1711,28 +2026,78 @@ pyyaml = [
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
-pyyaml-env-tag = []
-radon = []
+pyyaml-env-tag = [
+ {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"},
+ {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"},
+]
+radon = [
+ {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"},
+ {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"},
+]
requests = [
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
{file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
]
-retry = []
+retry = [
+ {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"},
+ {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"},
+]
"ruamel.yaml" = [
{file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
{file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},
]
-"ruamel.yaml.clib" = []
+"ruamel.yaml.clib" = [
+ {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"},
+ {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"},
+ {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"},
+ {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"},
+ {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"},
+ {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"},
+ {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"},
+ {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"},
+ {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"},
+ {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"},
+ {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"},
+ {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"},
+ {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"},
+ {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"},
+ {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"},
+ {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"},
+ {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"},
+ {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"},
+ {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"},
+ {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"},
+ {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"},
+ {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"},
+ {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"},
+ {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"},
+ {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"},
+ {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"},
+ {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"},
+ {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"},
+ {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"},
+ {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"},
+]
s3transfer = [
{file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"},
{file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"},
]
+setuptools = [
+ {file = "setuptools-59.6.0-py3-none-any.whl", hash = "sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e"},
+ {file = "setuptools-59.6.0.tar.gz", hash = "sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373"},
+]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
-smmap = []
-stevedore = []
+smmap = [
+ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"},
+ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
+]
+stevedore = [
+ {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"},
+ {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"},
+]
tomli = [
{file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"},
{file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"},
@@ -1764,8 +2129,8 @@ typed-ast = [
{file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
]
types-requests = [
- {file = "types-requests-2.28.10.tar.gz", hash = "sha256:97d8f40aa1ffe1e58c3726c77d63c182daea9a72d9f1fa2cafdea756b2a19f2c"},
- {file = "types_requests-2.28.10-py3-none-any.whl", hash = "sha256:45b485725ed58752f2b23461252f1c1ad9205b884a1e35f786bb295525a3e16a"},
+ {file = "types-requests-2.28.11.1.tar.gz", hash = "sha256:02b1806c5b9904edcd87fa29236164aea0e6cdc4d93ea020cd615ef65cb43d65"},
+ {file = "types_requests-2.28.11.1-py3-none-any.whl", hash = "sha256:1ff2c1301f6fe58b5d1c66cdf631ca19734cb3b1a4bbadc878d75557d183291a"},
]
types-urllib3 = [
{file = "types-urllib3-1.26.24.tar.gz", hash = "sha256:a1b3aaea7dda3eb1b51699ee723aadd235488e4dc4648e030f09bc429ecff42f"},
@@ -1872,5 +2237,11 @@ wrapt = [
{file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
]
-xenon = []
-zipp = []
+xenon = [
+ {file = "xenon-0.9.0-py2.py3-none-any.whl", hash = "sha256:994c80c7f1c6d40596b600b93734d85a5739208f31895ef99f1e4d362caf9e35"},
+ {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"},
+]
+zipp = [
+ {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"},
+ {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"},
+]
diff --git a/pyproject.toml b/pyproject.toml
index 281d66c0277..a63f9360311 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "aws_lambda_powertools"
-version = "1.26.6"
+version = "1.29.2"
description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more."
authors = ["Amazon Web Services"]
include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]
@@ -40,7 +40,7 @@ flake8-fixme = "^1.1.1"
flake8-isort = "^4.1.2"
flake8-variables-names = "^0.0.4"
isort = "^5.10.1"
-pytest-cov = "^3.0.0"
+pytest-cov = "^4.0.0"
pytest-mock = "^3.5.1"
pdoc3 = "^0.10.0"
pytest-asyncio = "^0.16.0"
@@ -48,7 +48,7 @@ bandit = "^1.7.1"
radon = "^5.1.0"
xenon = "^0.9.0"
flake8-eradicate = "^1.2.1"
-flake8-bugbear = "^22.8.23"
+flake8-bugbear = "^22.9.23"
mkdocs-git-revision-date-plugin = "^0.3.2"
mike = "^0.6.0"
mypy = "^0.971"
@@ -62,17 +62,17 @@ mypy-boto3-cloudwatch = { version = "^1.24.35", python = ">=3.7" }
mypy-boto3-dynamodb = { version = "^1.24.74", python = ">=3.7" }
mypy-boto3-lambda = { version = "^1.24.0", python = ">=3.7" }
mypy-boto3-logs = { version = "^1.24.0", python = ">=3.7" }
-mypy-boto3-secretsmanager = { version = "^1.24.11", python = ">=3.7" }
-mypy-boto3-ssm = { version = "^1.24.0", python = ">=3.7" }
-mypy-boto3-s3 = { version = "^1.24.0", python = ">=3.7" }
+mypy-boto3-secretsmanager = { version = "^1.24.83", python = ">=3.7" }
+mypy-boto3-ssm = { version = "^1.24.81", python = ">=3.7" }
+mypy-boto3-s3 = { version = "^1.24.76", python = ">=3.7" }
mypy-boto3-xray = { version = "^1.24.0", python = ">=3.7" }
-types-requests = "^2.28.8"
+types-requests = "^2.28.11"
typing-extensions = { version = "^4.3.0", python = ">=3.7" }
python-snappy = "^0.6.1"
-mkdocs-material = { version = "^8.5.1", python = ">=3.7" }
+mkdocs-material = { version = "^8.5.4", python = ">=3.7" }
filelock = { version = "^3.8.0", python = ">=3.7" }
# Maintenance: 2022-09-19 pinned mako to fix vulnerability as a pdoc3 dependency. Remove once we drop python 3.6.
-Mako = {version = "1.2.2", python = ">=3.7"}
+Mako = {version = "1.2.3", python = ">=3.7"}
[tool.poetry.extras]
pydantic = ["pydantic", "email-validator"]
@@ -112,11 +112,7 @@ exclude_lines = [
]
[tool.isort]
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-line_length = 120
+profile = "black" # resolves conflict with black
skip = "example"
[tool.black]
diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py
index cddd6844504..87be83d2f96 100644
--- a/tests/e2e/utils/infrastructure.py
+++ b/tests/e2e/utils/infrastructure.py
@@ -10,7 +10,16 @@
import boto3
import pytest
import yaml
-from aws_cdk import App, AssetStaging, BundlingOptions, CfnOutput, DockerImage, RemovalPolicy, Stack, aws_logs
+from aws_cdk import (
+ App,
+ AssetStaging,
+ BundlingOptions,
+ CfnOutput,
+ DockerImage,
+ RemovalPolicy,
+ Stack,
+ aws_logs,
+)
from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing
from filelock import FileLock
from mypy_boto3_cloudformation import CloudFormationClient
diff --git a/tests/functional/data_classes/test_amazon_mq.py b/tests/functional/data_classes/test_amazon_mq.py
index a88a962c17b..bca2c6e8a8a 100644
--- a/tests/functional/data_classes/test_amazon_mq.py
+++ b/tests/functional/data_classes/test_amazon_mq.py
@@ -1,7 +1,14 @@
from typing import Dict
-from aws_lambda_powertools.utilities.data_classes.active_mq_event import ActiveMQEvent, ActiveMQMessage
-from aws_lambda_powertools.utilities.data_classes.rabbit_mq_event import BasicProperties, RabbitMessage, RabbitMQEvent
+from aws_lambda_powertools.utilities.data_classes.active_mq_event import (
+ ActiveMQEvent,
+ ActiveMQMessage,
+)
+from aws_lambda_powertools.utilities.data_classes.rabbit_mq_event import (
+ BasicProperties,
+ RabbitMessage,
+ RabbitMQEvent,
+)
from tests.functional.utils import load_event
diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py
index 4b1d7c1ee32..ae2c3eee43e 100644
--- a/tests/functional/event_handler/test_api_gateway.py
+++ b/tests/functional/event_handler/test_api_gateway.py
@@ -686,6 +686,16 @@ def test_debug_mode_environment_variable(monkeypatch):
assert app._debug
+def test_powertools_dev_sets_debug_mode(monkeypatch):
+ # GIVEN a debug mode environment variable is set
+ monkeypatch.setenv(constants.POWERTOOLS_DEV_ENV, "true")
+ app = ApiGatewayResolver()
+
+ # WHEN calling app._debug
+ # THEN the debug mode is enabled
+ assert app._debug
+
+
def test_debug_json_formatting(json_dump):
# GIVEN debug is True
app = ApiGatewayResolver(debug=True)
@@ -1289,3 +1299,73 @@ def handler(event: APIGatewayProxyEventV2, context):
# THEN
result = handler(load_event("apiGatewayProxyV2Event.json"), None)
assert result["statusCode"] == 200
+
+
+def test_response_with_status_code_only():
+ ret = Response(status_code=204)
+ assert ret.status_code == 204
+ assert ret.body is None
+ assert ret.headers == {}
+
+
+def test_append_context():
+ app = APIGatewayRestResolver()
+ app.append_context(is_admin=True)
+ assert app.context.get("is_admin") is True
+
+
+def test_router_append_context():
+ router = Router()
+ router.append_context(is_admin=True)
+ assert router.context.get("is_admin") is True
+
+
+def test_route_context_is_cleared_after_resolve():
+ # GIVEN a Http API V1 proxy type event
+ app = APIGatewayRestResolver()
+ app.append_context(is_admin=True)
+
+ @app.get("/my/path")
+ def my_path():
+ return {"is_admin": app.context["is_admin"]}
+
+ # WHEN event resolution kicks in
+ app.resolve(LOAD_GW_EVENT, {})
+
+ # THEN context should be empty
+ assert app.context == {}
+
+
+def test_router_has_access_to_app_context(json_dump):
+ # GIVEN a Router with registered routes
+ app = ApiGatewayResolver()
+ router = Router()
+ ctx = {"is_admin": True}
+
+ @router.get("/my/path")
+ def my_path():
+ return {"is_admin": router.context["is_admin"]}
+
+ app.include_router(router)
+
+ # WHEN context is added and event resolution kicks in
+ app.append_context(**ctx)
+ ret = app.resolve(LOAD_GW_EVENT, {})
+
+ # THEN response include initial context
+ assert ret["body"] == json_dump(ctx)
+ assert router.context == {}
+
+
+def test_include_router_merges_context():
+ # GIVEN
+ app = APIGatewayRestResolver()
+ router = Router()
+
+ # WHEN
+ app.append_context(is_admin=True)
+ router.append_context(product_access=True)
+
+ app.include_router(router)
+
+ assert app.context == router.context
diff --git a/tests/functional/event_handler/test_appsync.py b/tests/functional/event_handler/test_appsync.py
index 79173e55825..54695eba240 100644
--- a/tests/functional/event_handler/test_appsync.py
+++ b/tests/functional/event_handler/test_appsync.py
@@ -188,3 +188,68 @@ def get_locations2(name: str):
# THEN
assert result1 == "get_locations#value"
assert result2 == "get_locations2#value"
+
+
+def test_append_context():
+ app = AppSyncResolver()
+ app.append_context(is_admin=True)
+ assert app.context.get("is_admin") is True
+
+
+def test_router_append_context():
+ router = Router()
+ router.append_context(is_admin=True)
+ assert router.context.get("is_admin") is True
+
+
+def test_route_context_is_cleared_after_resolve():
+ # GIVEN
+ app = AppSyncResolver()
+ event = {"typeName": "Query", "fieldName": "listLocations", "arguments": {"name": "value"}}
+
+ @app.resolver(field_name="listLocations")
+ def get_locations(name: str):
+ return f"get_locations#{name}"
+
+ # WHEN event resolution kicks in
+ app.append_context(is_admin=True)
+ app.resolve(event, {})
+
+ # THEN context should be empty
+ assert app.context == {}
+
+
+def test_router_has_access_to_app_context():
+ # GIVEN
+ app = AppSyncResolver()
+ router = Router()
+ event = {"typeName": "Query", "fieldName": "listLocations", "arguments": {"name": "value"}}
+
+ @router.resolver(type_name="Query", field_name="listLocations")
+ def get_locations(name: str):
+ if router.context["is_admin"]:
+ return f"get_locations#{name}"
+
+ app.include_router(router)
+
+ # WHEN
+ app.append_context(is_admin=True)
+ ret = app.resolve(event, {})
+
+ # THEN
+ assert ret == "get_locations#value"
+ assert router.context == {}
+
+
+def test_include_router_merges_context():
+ # GIVEN
+ app = AppSyncResolver()
+ router = Router()
+
+ # WHEN
+ app.append_context(is_admin=True)
+ router.append_context(product_access=True)
+
+ app.include_router(router)
+
+ assert app.context == router.context
diff --git a/tests/functional/event_handler/test_lambda_function_url.py b/tests/functional/event_handler/test_lambda_function_url.py
index dc00c535580..4d4d5c39f35 100644
--- a/tests/functional/event_handler/test_lambda_function_url.py
+++ b/tests/functional/event_handler/test_lambda_function_url.py
@@ -1,4 +1,8 @@
-from aws_lambda_powertools.event_handler import LambdaFunctionUrlResolver, Response, content_types
+from aws_lambda_powertools.event_handler import (
+ LambdaFunctionUrlResolver,
+ Response,
+ content_types,
+)
from aws_lambda_powertools.utilities.data_classes import LambdaFunctionUrlEvent
from tests.functional.utils import load_event
diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py
index 32d6143ba9a..416fe0be3ba 100644
--- a/tests/functional/feature_flags/test_feature_flags.py
+++ b/tests/functional/feature_flags/test_feature_flags.py
@@ -3,7 +3,10 @@
import pytest
from botocore.config import Config
-from aws_lambda_powertools.utilities.feature_flags import ConfigurationStoreError, schema
+from aws_lambda_powertools.utilities.feature_flags import (
+ ConfigurationStoreError,
+ schema,
+)
from aws_lambda_powertools.utilities.feature_flags.appconfig import AppConfigStore
from aws_lambda_powertools.utilities.feature_flags.exceptions import StoreClientError
from aws_lambda_powertools.utilities.feature_flags.feature_flags import FeatureFlags
diff --git a/tests/functional/feature_flags/test_schema_validation.py b/tests/functional/feature_flags/test_schema_validation.py
index a82f9ecafa7..0366a5609ee 100644
--- a/tests/functional/feature_flags/test_schema_validation.py
+++ b/tests/functional/feature_flags/test_schema_validation.py
@@ -2,7 +2,9 @@
import pytest # noqa: F401
-from aws_lambda_powertools.utilities.feature_flags.exceptions import SchemaValidationError
+from aws_lambda_powertools.utilities.feature_flags.exceptions import (
+ SchemaValidationError,
+)
from aws_lambda_powertools.utilities.feature_flags.schema import (
CONDITION_ACTION,
CONDITION_KEY,
diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py
index 97a9166efa0..f63d7347b1c 100644
--- a/tests/functional/idempotency/test_idempotency.py
+++ b/tests/functional/idempotency/test_idempotency.py
@@ -10,9 +10,19 @@
from botocore import stub
from pydantic import BaseModel
-from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2, event_source
-from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig
-from aws_lambda_powertools.utilities.idempotency.base import MAX_RETRIES, IdempotencyHandler, _prepare_data
+from aws_lambda_powertools.utilities.data_classes import (
+ APIGatewayProxyEventV2,
+ event_source,
+)
+from aws_lambda_powertools.utilities.idempotency import (
+ DynamoDBPersistenceLayer,
+ IdempotencyConfig,
+)
+from aws_lambda_powertools.utilities.idempotency.base import (
+ MAX_RETRIES,
+ IdempotencyHandler,
+ _prepare_data,
+)
from aws_lambda_powertools.utilities.idempotency.exceptions import (
IdempotencyAlreadyInProgressError,
IdempotencyInconsistentStateError,
@@ -21,8 +31,14 @@
IdempotencyPersistenceLayerError,
IdempotencyValidationError,
)
-from aws_lambda_powertools.utilities.idempotency.idempotency import idempotent, idempotent_function
-from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer, DataRecord
+from aws_lambda_powertools.utilities.idempotency.idempotency import (
+ idempotent,
+ idempotent_function,
+)
+from aws_lambda_powertools.utilities.idempotency.persistence.base import (
+ BasePersistenceLayer,
+ DataRecord,
+)
from aws_lambda_powertools.utilities.validation import envelopes, validator
from tests.functional.idempotency.utils import (
build_idempotency_put_item_stub,
diff --git a/tests/functional/parser/test_cloudwatch.py b/tests/functional/parser/test_cloudwatch.py
index 7290d0bffcb..5fa197bb792 100644
--- a/tests/functional/parser/test_cloudwatch.py
+++ b/tests/functional/parser/test_cloudwatch.py
@@ -5,8 +5,15 @@
import pytest
-from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
-from aws_lambda_powertools.utilities.parser.models import CloudWatchLogsLogEvent, CloudWatchLogsModel
+from aws_lambda_powertools.utilities.parser import (
+ ValidationError,
+ envelopes,
+ event_parser,
+)
+from aws_lambda_powertools.utilities.parser.models import (
+ CloudWatchLogsLogEvent,
+ CloudWatchLogsModel,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.parser.schemas import MyCloudWatchBusiness
from tests.functional.utils import load_event
diff --git a/tests/functional/parser/test_dynamodb.py b/tests/functional/parser/test_dynamodb.py
index 9917fac234b..e6238b00b83 100644
--- a/tests/functional/parser/test_dynamodb.py
+++ b/tests/functional/parser/test_dynamodb.py
@@ -2,7 +2,11 @@
import pytest
-from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
+from aws_lambda_powertools.utilities.parser import (
+ ValidationError,
+ envelopes,
+ event_parser,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness
from tests.functional.utils import load_event
diff --git a/tests/functional/parser/test_eventbridge.py b/tests/functional/parser/test_eventbridge.py
index 6242403ab35..ca41e1a4bc5 100644
--- a/tests/functional/parser/test_eventbridge.py
+++ b/tests/functional/parser/test_eventbridge.py
@@ -2,9 +2,16 @@
import pytest
-from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
+from aws_lambda_powertools.utilities.parser import (
+ ValidationError,
+ envelopes,
+ event_parser,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
-from tests.functional.parser.schemas import MyAdvancedEventbridgeBusiness, MyEventbridgeBusiness
+from tests.functional.parser.schemas import (
+ MyAdvancedEventbridgeBusiness,
+ MyEventbridgeBusiness,
+)
from tests.functional.utils import load_event
diff --git a/tests/functional/parser/test_kinesis.py b/tests/functional/parser/test_kinesis.py
index 552cb6cef68..13f1e55b479 100644
--- a/tests/functional/parser/test_kinesis.py
+++ b/tests/functional/parser/test_kinesis.py
@@ -2,8 +2,15 @@
import pytest
-from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
-from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel, KinesisDataStreamRecordPayload
+from aws_lambda_powertools.utilities.parser import (
+ ValidationError,
+ envelopes,
+ event_parser,
+)
+from aws_lambda_powertools.utilities.parser.models import (
+ KinesisDataStreamModel,
+ KinesisDataStreamRecordPayload,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.parser.schemas import MyKinesisBusiness
from tests.functional.utils import load_event
diff --git a/tests/functional/parser/test_parser.py b/tests/functional/parser/test_parser.py
index 5e9e40faec4..d2a77f10998 100644
--- a/tests/functional/parser/test_parser.py
+++ b/tests/functional/parser/test_parser.py
@@ -3,7 +3,11 @@
import pytest
-from aws_lambda_powertools.utilities.parser import ValidationError, event_parser, exceptions
+from aws_lambda_powertools.utilities.parser import (
+ ValidationError,
+ event_parser,
+ exceptions,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
diff --git a/tests/functional/parser/test_sns.py b/tests/functional/parser/test_sns.py
index b0d9ff69a9b..6042322e88a 100644
--- a/tests/functional/parser/test_sns.py
+++ b/tests/functional/parser/test_sns.py
@@ -3,7 +3,11 @@
import pytest
-from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
+from aws_lambda_powertools.utilities.parser import (
+ ValidationError,
+ envelopes,
+ event_parser,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.parser.schemas import MyAdvancedSnsBusiness, MySnsBusiness
from tests.functional.utils import load_event
diff --git a/tests/functional/parser/test_sqs.py b/tests/functional/parser/test_sqs.py
index 7ca883616f2..cd86c68a4aa 100644
--- a/tests/functional/parser/test_sqs.py
+++ b/tests/functional/parser/test_sqs.py
@@ -2,7 +2,11 @@
import pytest
-from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
+from aws_lambda_powertools.utilities.parser import (
+ ValidationError,
+ envelopes,
+ event_parser,
+)
from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness
from tests.functional.utils import load_event
diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py
index dbef57162e2..1f8c0cef955 100644
--- a/tests/functional/test_data_classes.py
+++ b/tests/functional/test_data_classes.py
@@ -49,7 +49,9 @@
AppSyncResolverEventInfo,
get_identity_object,
)
-from aws_lambda_powertools.utilities.data_classes.code_pipeline_job_event import CodePipelineData
+from aws_lambda_powertools.utilities.data_classes.code_pipeline_job_event import (
+ CodePipelineData,
+)
from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import (
CreateAuthChallengeTriggerEvent,
CustomMessageTriggerEvent,
@@ -62,7 +64,10 @@
UserMigrationTriggerEvent,
VerifyAuthChallengeResponseTriggerEvent,
)
-from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper
+from aws_lambda_powertools.utilities.data_classes.common import (
+ BaseProxyEvent,
+ DictWrapper,
+)
from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import (
ConnectContactFlowChannel,
ConnectContactFlowEndpointType,
@@ -74,10 +79,13 @@
AttributeValueType,
DynamoDBRecordEventName,
DynamoDBStreamEvent,
+ StreamRecord,
StreamViewType,
)
from aws_lambda_powertools.utilities.data_classes.event_source import event_source
-from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent
+from aws_lambda_powertools.utilities.data_classes.s3_object_event import (
+ S3ObjectLambdaEvent,
+)
from tests.functional.utils import load_event
@@ -101,6 +109,19 @@ def message(self) -> str:
assert DataClassSample(data1).raw_event is data1
+def test_dict_wrapper_implements_mapping():
+ class DataClassSample(DictWrapper):
+ pass
+
+ data = {"message": "foo1"}
+ event_source = DataClassSample(data)
+ assert len(event_source) == len(data)
+ assert list(event_source) == list(data)
+ assert event_source.keys() == data.keys()
+ assert list(event_source.values()) == list(data.values())
+ assert event_source.items() == data.items()
+
+
def test_cloud_watch_dashboard_event():
event = CloudWatchDashboardCustomWidgetEvent(load_event("cloudWatchDashboardEvent.json"))
assert event.describe is False
@@ -617,6 +638,23 @@ def test_dynamo_attribute_value_type_error():
print(attribute_value.get_type)
+def test_stream_record_keys_with_valid_keys():
+ attribute_value = {"Foo": "Bar"}
+ record = StreamRecord({"Keys": {"Key1": attribute_value}})
+ assert record.keys == {"Key1": AttributeValue(attribute_value)}
+
+
+def test_stream_record_keys_with_no_keys():
+ record = StreamRecord({})
+ assert record.keys is None
+
+
+def test_stream_record_keys_overrides_dict_wrapper_keys():
+ data = {"Keys": {"key1": {"attr1": "value1"}}}
+ record = StreamRecord(data)
+ assert record.keys != data.keys()
+
+
def test_event_bridge_event():
event = EventBridgeEvent(load_event("eventBridgeEvent.json"))
diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py
index c8b3dc61755..7eb3018ef64 100644
--- a/tests/functional/test_logger.py
+++ b/tests/functional/test_logger.py
@@ -5,6 +5,7 @@
import random
import re
import string
+import warnings
from ast import Dict
from collections import namedtuple
from datetime import datetime, timezone
@@ -12,10 +13,13 @@
import pytest
-from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools import Logger, Tracer, set_package_logger_handler
from aws_lambda_powertools.logging import correlation_paths
from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError
-from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter, LambdaPowertoolsFormatter
+from aws_lambda_powertools.logging.formatter import (
+ BasePowertoolsFormatter,
+ LambdaPowertoolsFormatter,
+)
from aws_lambda_powertools.logging.logger import set_package_logger
from aws_lambda_powertools.shared import constants
from aws_lambda_powertools.utilities.data_classes import S3Event, event_source
@@ -824,3 +828,33 @@ def handler(event, context, planet, str_end="."):
log = capture_logging_output(stdout)
assert log["message"] == "Hello World!"
+
+
+def test_set_package_logger_handler_with_powertools_debug_env_var(stdout, monkeypatch: pytest.MonkeyPatch):
+ # GIVEN POWERTOOLS_DEBUG is set
+ monkeypatch.setenv(constants.POWERTOOLS_DEBUG_ENV, "1")
+ logger = logging.getLogger("aws_lambda_powertools")
+
+    # WHEN set_package_logger_handler is used at initialization
+ # and any Powertools operation is used (e.g., Tracer)
+ set_package_logger_handler(stream=stdout)
+ Tracer(disabled=True)
+
+ # THEN Tracer debug log statement should be logged
+ output = stdout.getvalue()
+ assert "Tracing has been disabled" in output
+ assert logger.level == logging.DEBUG
+
+
+def test_powertools_debug_env_var_warning(monkeypatch: pytest.MonkeyPatch):
+ # GIVEN POWERTOOLS_DEBUG is set
+ monkeypatch.setenv(constants.POWERTOOLS_DEBUG_ENV, "1")
+ warning_message = "POWERTOOLS_DEBUG environment variable is enabled. Setting logging level to DEBUG."
+
+    # WHEN set_package_logger_handler is used at initialization
+ # THEN a warning should be emitted
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("default")
+ set_package_logger_handler()
+ assert len(w) == 1
+ assert str(w[0].message) == warning_message
diff --git a/tests/functional/test_logger_powertools_formatter.py b/tests/functional/test_logger_powertools_formatter.py
index c9f970e29a5..7276f49d487 100644
--- a/tests/functional/test_logger_powertools_formatter.py
+++ b/tests/functional/test_logger_powertools_formatter.py
@@ -1,6 +1,7 @@
"""aws_lambda_logging tests."""
import io
import json
+import os
import random
import string
import time
@@ -288,3 +289,23 @@ def test_log_formatting(stdout, service_name):
# THEN the formatting should be applied (NB. this is valid json, but hasn't be parsed)
assert log_dict["message"] == '["foo bar 123 [1, None]", null]'
+
+
+def test_log_json_indent_compact_indent(stdout, service_name, monkeypatch):
+ # GIVEN a logger with default settings and WHEN POWERTOOLS_DEV is not set
+ monkeypatch.delenv(name="POWERTOOLS_DEV", raising=False)
+ logger = Logger(service=service_name, stream=stdout)
+ logger.info("Test message")
+ # THEN the json should not have multiple lines
+ new_lines = stdout.getvalue().count(os.linesep)
+ assert new_lines == 1
+
+
+def test_log_json_pretty_indent(stdout, service_name, monkeypatch):
+ # GIVEN a logger with default settings and WHEN POWERTOOLS_DEV=="true"
+ monkeypatch.setenv(name="POWERTOOLS_DEV", value="true")
+ logger = Logger(service=service_name, stream=stdout)
+ logger.info("Test message")
+    # THEN the json should contain more than one line
+ new_lines = stdout.getvalue().count(os.linesep)
+ assert new_lines > 1
diff --git a/tests/functional/test_logger_utils.py b/tests/functional/test_logger_utils.py
index 09e4be56d36..0e0c7fc7766 100644
--- a/tests/functional/test_logger_utils.py
+++ b/tests/functional/test_logger_utils.py
@@ -161,15 +161,17 @@ def test_copy_config_to_ext_loggers_clean_old_handlers(stdout, logger, log_level
assert isinstance(logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter)
-def test_copy_config_to_ext_loggers_custom_log_level(stdout, logger, log_level):
+@pytest.mark.parametrize("level_to_set", ["WARNING", 30])
+def test_copy_config_to_ext_loggers_custom_log_level(stdout, logger, log_level, level_to_set):
# GIVEN an external logger and powertools logger initialized
logger = logger()
powertools_logger = Logger(service=service_name(), level=log_level.CRITICAL.value, stream=stdout)
- level = log_level.WARNING.name
# WHEN configuration copied from powertools logger to INCLUDED external logger
# AND external logger used with custom log_level
- utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={logger.name}, log_level=level)
+ utils.copy_config_to_registered_loggers(
+ source_logger=powertools_logger, include={logger.name}, log_level=level_to_set
+ )
msg = "test message4"
logger.warning(msg)
log = capture_logging_output(stdout)
@@ -263,3 +265,26 @@ def test_copy_config_to_ext_loggers_no_duplicate_logs(stdout, logger, log_level)
logs = capture_multiple_logging_statements_output(stdout)
assert {"message": msg} not in logs
assert sum(msg in log.values() for log in logs) == 1
+
+
+def test_logger_name_is_included_during_copy(stdout, logger, log_level):
+ # GIVEN two external loggers and powertools logger initialized
+ logger_1: logging.Logger = logger()
+ logger_2: logging.Logger = logger()
+ msg = "test message1"
+
+ powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout)
+
+ # WHEN configuration copied from powertools logger to ALL external loggers
+ # AND external loggers used
+ utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={logger_1.name, logger_2.name})
+ logger_1.info(msg)
+ logger_2.info(msg)
+ powertools_logger.info(msg)
+
+ logger1_log, logger2_log, pt_log = capture_multiple_logging_statements_output(stdout)
+
+ # THEN name attribute should be present in all loggers
+ assert logger1_log["name"] == logger_1.name
+ assert logger2_log["name"] == logger_2.name
+ assert pt_log["name"] == powertools_logger.name
diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py
index 1a52d84d4fe..e0ce7f84dc9 100644
--- a/tests/functional/test_metrics.py
+++ b/tests/functional/test_metrics.py
@@ -6,7 +6,12 @@
import pytest
from aws_lambda_powertools import Metrics, single_metric
-from aws_lambda_powertools.metrics import MetricUnit, MetricUnitError, MetricValueError, SchemaValidationError
+from aws_lambda_powertools.metrics import (
+ MetricUnit,
+ MetricUnitError,
+ MetricValueError,
+ SchemaValidationError,
+)
from aws_lambda_powertools.metrics import metrics as metrics_global
from aws_lambda_powertools.metrics.base import MAX_DIMENSIONS, MetricManager
diff --git a/tests/functional/test_middleware_factory.py b/tests/functional/test_middleware_factory.py
index ee8078f801a..fb868cef0ee 100644
--- a/tests/functional/test_middleware_factory.py
+++ b/tests/functional/test_middleware_factory.py
@@ -4,7 +4,9 @@
import pytest
from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
-from aws_lambda_powertools.middleware_factory.exceptions import MiddlewareInvalidArgumentError
+from aws_lambda_powertools.middleware_factory.exceptions import (
+ MiddlewareInvalidArgumentError,
+)
@pytest.fixture
diff --git a/tests/functional/test_shared_functions.py b/tests/functional/test_shared_functions.py
index c71b7239739..a8e1755bdd5 100644
--- a/tests/functional/test_shared_functions.py
+++ b/tests/functional/test_shared_functions.py
@@ -1,6 +1,15 @@
+import warnings
+
import pytest
-from aws_lambda_powertools.shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice, strtobool
+from aws_lambda_powertools.shared import constants
+from aws_lambda_powertools.shared.functions import (
+ powertools_debug_is_set,
+ powertools_dev_is_set,
+ resolve_env_var_choice,
+ resolve_truthy_env_var_choice,
+ strtobool,
+)
def test_resolve_env_var_choice_explicit_wins_over_env_var():
@@ -27,3 +36,31 @@ def test_strtobool_value_error():
with pytest.raises(ValueError) as exp:
strtobool("fail")
assert str(exp.value) == "invalid truth value 'fail'"
+
+
+def test_powertools_dev_warning(monkeypatch: pytest.MonkeyPatch):
+    # GIVEN POWERTOOLS_DEV is set
+ monkeypatch.setenv(constants.POWERTOOLS_DEV_ENV, "1")
+ warning_message = "POWERTOOLS_DEV environment variable is enabled. Increasing verbosity across utilities."
+
+    # WHEN powertools_dev_is_set is called
+ # THEN a warning should be emitted
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("default")
+ powertools_dev_is_set()
+ assert len(w) == 1
+ assert str(w[0].message) == warning_message
+
+
+def test_powertools_debug_warning(monkeypatch: pytest.MonkeyPatch):
+ # GIVEN POWERTOOLS_DEBUG is set
+ monkeypatch.setenv(constants.POWERTOOLS_DEBUG_ENV, "1")
+ warning_message = "POWERTOOLS_DEBUG environment variable is enabled. Setting logging level to DEBUG."
+
+    # WHEN powertools_debug_is_set is called
+ # THEN a warning should be emitted
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("default")
+ powertools_debug_is_set()
+ assert len(w) == 1
+ assert str(w[0].message) == warning_message
diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py
index a5e1e706437..b5489fb7c62 100644
--- a/tests/functional/test_utilities_batch.py
+++ b/tests/functional/test_utilities_batch.py
@@ -3,6 +3,7 @@
from random import randint
from typing import Callable, Dict, Optional
from unittest.mock import patch
+from uuid import uuid4
import pytest
from botocore.config import Config
@@ -15,15 +16,31 @@
batch_processor,
sqs_batch_processor,
)
-from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError, SQSBatchProcessingError
-from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord
-from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord
+from aws_lambda_powertools.utilities.batch.exceptions import (
+ BatchProcessingError,
+ SQSBatchProcessingError,
+)
+from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
+ DynamoDBRecord,
+)
+from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import (
+ KinesisStreamRecord,
+)
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
from aws_lambda_powertools.utilities.parser import BaseModel, validator
-from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamChangedRecordModel, DynamoDBStreamRecordModel
-from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecord as KinesisDataStreamRecordModel
-from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecordPayload, SqsRecordModel
+from aws_lambda_powertools.utilities.parser.models import (
+ DynamoDBStreamChangedRecordModel,
+ DynamoDBStreamRecordModel,
+)
+from aws_lambda_powertools.utilities.parser.models import (
+ KinesisDataStreamRecord as KinesisDataStreamRecordModel,
+)
+from aws_lambda_powertools.utilities.parser.models import (
+ KinesisDataStreamRecordPayload,
+ SqsRecordModel,
+)
from aws_lambda_powertools.utilities.parser.types import Literal
+from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.utils import b64_to_str, str_to_b64
@@ -167,6 +184,18 @@ def factory(item: Dict) -> str:
return factory
+@pytest.fixture(scope="module")
+def lambda_context() -> LambdaContext:
+ class DummyLambdaContext:
+ def __init__(self):
+ self.function_name = "test-func"
+ self.memory_limit_in_mb = 128
+ self.invoked_function_arn = "arn:aws:lambda:eu-west-1:809313241234:function:test-func"
+ self.aws_request_id = f"{uuid4()}"
+
+ return DummyLambdaContext
+
+
@pytest.mark.parametrize(
"success_messages_count",
([1, 18, 34]),
@@ -908,3 +937,41 @@ def lambda_handler(event, context):
# THEN raise BatchProcessingError
assert "All records failed processing. " in str(e.value)
+
+
+def test_batch_processor_handler_receives_lambda_context(sqs_event_factory, lambda_context: LambdaContext):
+ # GIVEN
+ def record_handler(record, lambda_context: LambdaContext = None):
+ return lambda_context.function_name == "test-func"
+
+ first_record = SQSRecord(sqs_event_factory("success"))
+ event = {"Records": [first_record.raw_event]}
+
+ processor = BatchProcessor(event_type=EventType.SQS)
+
+ @batch_processor(record_handler=record_handler, processor=processor)
+ def lambda_handler(event, context):
+ return processor.response()
+
+ # WHEN/THEN
+ lambda_handler(event, lambda_context())
+
+
+def test_batch_processor_context_manager_handler_receives_lambda_context(
+ sqs_event_factory, lambda_context: LambdaContext
+):
+ # GIVEN
+ def record_handler(record, lambda_context: LambdaContext = None):
+ return lambda_context.function_name == "test-func"
+
+ first_record = SQSRecord(sqs_event_factory("success"))
+ event = {"Records": [first_record.raw_event]}
+
+ processor = BatchProcessor(event_type=EventType.SQS)
+
+ def lambda_handler(event, context):
+ with processor(records=event["Records"], handler=record_handler, lambda_context=context) as batch:
+ batch.process()
+
+ # WHEN/THEN
+ lambda_handler(event, lambda_context())
diff --git a/tests/functional/test_utilities_typing.py b/tests/functional/test_utilities_typing.py
index 8522cfcbf99..7d2a609fbf7 100644
--- a/tests/functional/test_utilities_typing.py
+++ b/tests/functional/test_utilities_typing.py
@@ -1,7 +1,13 @@
from aws_lambda_powertools.utilities.typing import LambdaContext
-from aws_lambda_powertools.utilities.typing.lambda_client_context import LambdaClientContext
-from aws_lambda_powertools.utilities.typing.lambda_client_context_mobile_client import LambdaClientContextMobileClient
-from aws_lambda_powertools.utilities.typing.lambda_cognito_identity import LambdaCognitoIdentity
+from aws_lambda_powertools.utilities.typing.lambda_client_context import (
+ LambdaClientContext,
+)
+from aws_lambda_powertools.utilities.typing.lambda_client_context_mobile_client import (
+ LambdaClientContextMobileClient,
+)
+from aws_lambda_powertools.utilities.typing.lambda_cognito_identity import (
+ LambdaCognitoIdentity,
+)
def test_typing():
diff --git a/tests/functional/validator/test_validator.py b/tests/functional/validator/test_validator.py
index cd5c4168f56..c3e89c1bcfe 100644
--- a/tests/functional/validator/test_validator.py
+++ b/tests/functional/validator/test_validator.py
@@ -4,7 +4,12 @@
import pytest
from jmespath import functions
-from aws_lambda_powertools.utilities.validation import envelopes, exceptions, validate, validator
+from aws_lambda_powertools.utilities.validation import (
+ envelopes,
+ exceptions,
+ validate,
+ validator,
+)
def test_validate_raw_event(schema, raw_event):