From 14e00ccae083eebb1f3932f22b387984829692ac Mon Sep 17 00:00:00 2001 From: Vladimir Prudnikov Date: Wed, 1 Feb 2023 16:50:40 +0300 Subject: [PATCH 01/35] feat(metrics): add default_dimensions to single_metric (#1880) Co-authored-by: Ruben Fonseca --- aws_lambda_powertools/metrics/base.py | 11 ++++++- docs/core/metrics.md | 14 ++++++++ .../src/single_metric_default_dimensions.py | 14 ++++++++ ...ingle_metric_default_dimensions_inherit.py | 17 ++++++++++ tests/functional/test_metrics.py | 32 +++++++++++++++++++ 5 files changed, 87 insertions(+), 1 deletion(-) create mode 100644 examples/metrics/src/single_metric_default_dimensions.py create mode 100644 examples/metrics/src/single_metric_default_dimensions_inherit.py diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index b032d181811..c2949ab43da 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -463,7 +463,11 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> N @contextmanager def single_metric( - name: str, unit: MetricUnit, value: float, namespace: Optional[str] = None + name: str, + unit: MetricUnit, + value: float, + namespace: Optional[str] = None, + default_dimensions: Optional[Dict[str, str]] = None, ) -> Generator[SingleMetric, None, None]: """Context manager to simplify creation of a single metric @@ -516,6 +520,11 @@ def single_metric( try: metric: SingleMetric = SingleMetric(namespace=namespace) metric.add_metric(name=name, unit=unit, value=value) + + if default_dimensions: + for dim_name, dim_value in default_dimensions.items(): + metric.add_dimension(name=dim_name, value=dim_value) + yield metric metric_set = metric.serialize_metric_set() finally: diff --git a/docs/core/metrics.md b/docs/core/metrics.md index e02b247f117..ca42b632f84 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -216,6 +216,20 @@ CloudWatch EMF uses the same dimensions across all your metrics. 
Use `single_met --8<-- "examples/metrics/src/single_metric_output.json" ``` +By default it will skip all previously defined dimensions including default dimensions. Use `default_dimensions` keyword argument if you want to reuse default dimensions or specify custom dimensions from a dictionary. + +=== "single_metric_default_dimensions_inherit.json" + + ```json hl_lines="10 15" + --8<-- "examples/metrics/src/single_metric_default_dimensions_inherit.py" + ``` + +=== "single_metric_default_dimensions.py" + + ```python hl_lines="12" + --8<-- "examples/metrics/src/single_metric_default_dimensions.py" + ``` + ### Flushing metrics manually If you prefer not to use `log_metrics` because you might want to encapsulate additional logic when doing so, you can manually flush and clear metrics as follows: diff --git a/examples/metrics/src/single_metric_default_dimensions.py b/examples/metrics/src/single_metric_default_dimensions.py new file mode 100644 index 00000000000..3ed6c5e9035 --- /dev/null +++ b/examples/metrics/src/single_metric_default_dimensions.py @@ -0,0 +1,14 @@ +import os + +from aws_lambda_powertools import single_metric +from aws_lambda_powertools.metrics import MetricUnit +from aws_lambda_powertools.utilities.typing import LambdaContext + +STAGE = os.getenv("STAGE", "dev") + + +def lambda_handler(event: dict, context: LambdaContext): + with single_metric( + name="RecordsCount", unit=MetricUnit.Count, value=10, default_dimensions={"environment": STAGE} + ) as metric: + metric.add_dimension(name="TableName", value="Users") diff --git a/examples/metrics/src/single_metric_default_dimensions_inherit.py b/examples/metrics/src/single_metric_default_dimensions_inherit.py new file mode 100644 index 00000000000..92a27d6e0d3 --- /dev/null +++ b/examples/metrics/src/single_metric_default_dimensions_inherit.py @@ -0,0 +1,17 @@ +import os + +from aws_lambda_powertools import single_metric +from aws_lambda_powertools.metrics import Metrics, MetricUnit +from 
aws_lambda_powertools.utilities.typing import LambdaContext + +STAGE = os.getenv("STAGE", "dev") + +metrics = Metrics() +metrics.set_default_dimensions(environment=STAGE) + + +def lambda_handler(event: dict, context: LambdaContext): + with single_metric( + name="RecordsCount", unit=MetricUnit.Count, value=10, default_dimensions=metrics.default_dimensions + ) as metric: + metric.add_dimension(name="TableName", value="Users") diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py index c45c138ad59..d15b105057e 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/test_metrics.py @@ -156,6 +156,38 @@ def test_single_metric_logs_one_metric_only(capsys, metric, dimension, namespace assert expected == output +def test_single_metric_default_dimensions(capsys, metric, dimension, namespace): + # GIVEN we provide default dimensions + # WHEN using single_metric context manager + default_dimensions = {dimension["name"]: dimension["value"]} + with single_metric(namespace=namespace, default_dimensions=default_dimensions, **metric) as my_metric: + my_metric.add_metric(name="second_metric", unit="Count", value=1) + + output = capture_metrics_output(capsys) + expected = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) + + # THEN we should have default dimension added to the metric + remove_timestamp(metrics=[output, expected]) + assert expected == output + + +def test_single_metric_default_dimensions_inherit(capsys, metric, dimension, namespace): + # GIVEN we provide Metrics default dimensions + # WHEN using single_metric context manager + metrics = Metrics() + default_dimensions = {dimension["name"]: dimension["value"]} + metrics.set_default_dimensions(**default_dimensions) + with single_metric(namespace=namespace, default_dimensions=metrics.default_dimensions, **metric) as my_metric: + my_metric.add_metric(name="second_metric", unit="Count", value=1) + + output = capture_metrics_output(capsys) + expected = 
serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) + + # THEN we should have default dimension added to the metric + remove_timestamp(metrics=[output, expected]) + assert expected == output + + def test_log_metrics(capsys, metrics, dimensions, namespace): # GIVEN Metrics is initialized my_metrics = Metrics(namespace=namespace) From 6dd03e110f06e018439b48e930f5a98e4f91db30 Mon Sep 17 00:00:00 2001 From: Release bot Date: Wed, 1 Feb 2023 13:51:06 +0000 Subject: [PATCH 02/35] update changelog with latest changes --- CHANGELOG.md | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bde94fa2806..c075598044c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ # Unreleased +## Features + +* **metrics:** add default_dimensions to single_metric ([#1880](https://github.com/awslabs/aws-lambda-powertools-python/issues/1880)) + + + +## [v2.7.1] - 2023-02-01 ## Bug Fixes * parallel_run should fail when e2e tests fail @@ -24,19 +31,23 @@ ## Maintenance +* update v2 layer ARN on documentation * **deps:** bump docker/setup-buildx-action from 2.0.0 to 2.4.0 ([#1873](https://github.com/awslabs/aws-lambda-powertools-python/issues/1873)) * **deps:** bump dependabot/fetch-metadata from 1.3.5 to 1.3.6 ([#1855](https://github.com/awslabs/aws-lambda-powertools-python/issues/1855)) -* **deps-dev:** bump flake8-bugbear from 22.12.6 to 23.1.20 ([#1854](https://github.com/awslabs/aws-lambda-powertools-python/issues/1854)) -* **deps-dev:** bump mkdocs-material from 9.0.6 to 9.0.8 ([#1874](https://github.com/awslabs/aws-lambda-powertools-python/issues/1874)) -* **deps-dev:** bump isort from 5.11.4 to 5.11.5 ([#1875](https://github.com/awslabs/aws-lambda-powertools-python/issues/1875)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.0.post1 to 1.26.58 ([#1868](https://github.com/awslabs/aws-lambda-powertools-python/issues/1868)) +* **deps-dev:** bump isort from 5.11.4 to 5.11.5 
([#1875](https://github.com/awslabs/aws-lambda-powertools-python/issues/1875)) +* **deps-dev:** bump aws-cdk-lib from 2.62.1 to 2.62.2 ([#1869](https://github.com/awslabs/aws-lambda-powertools-python/issues/1869)) +* **deps-dev:** bump mkdocs-material from 9.0.6 to 9.0.8 ([#1874](https://github.com/awslabs/aws-lambda-powertools-python/issues/1874)) * **deps-dev:** bump aws-cdk-lib from 2.62.0 to 2.62.1 ([#1866](https://github.com/awslabs/aws-lambda-powertools-python/issues/1866)) * **deps-dev:** bump mypy-boto3-cloudformation from 1.26.35.post1 to 1.26.57 ([#1865](https://github.com/awslabs/aws-lambda-powertools-python/issues/1865)) * **deps-dev:** bump coverage from 7.0.5 to 7.1.0 ([#1862](https://github.com/awslabs/aws-lambda-powertools-python/issues/1862)) * **deps-dev:** bump aws-cdk-lib from 2.61.1 to 2.62.0 ([#1863](https://github.com/awslabs/aws-lambda-powertools-python/issues/1863)) -* **deps-dev:** bump aws-cdk-lib from 2.62.1 to 2.62.2 ([#1869](https://github.com/awslabs/aws-lambda-powertools-python/issues/1869)) +* **deps-dev:** bump flake8-bugbear from 22.12.6 to 23.1.20 ([#1854](https://github.com/awslabs/aws-lambda-powertools-python/issues/1854)) * **deps-dev:** bump mypy-boto3-lambda from 1.26.49 to 1.26.55 ([#1856](https://github.com/awslabs/aws-lambda-powertools-python/issues/1856)) +## Reverts +* fix(tests): remove custom workers + ## [v2.7.0] - 2023-01-24 @@ -2815,7 +2826,8 @@ * Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38 -[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.7.0...HEAD +[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.7.1...HEAD +[v2.7.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.7.0...v2.7.1 [v2.7.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.6.0...v2.7.0 [v2.6.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.5.0...v2.6.0 
[v2.5.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.4.0...v2.5.0 From ef8f44609a6cf932582c2863d9e5f2fe765dfc82 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Feb 2023 20:22:51 +0000 Subject: [PATCH 03/35] chore(deps-dev): bump aws-cdk-lib from 2.62.2 to 2.63.0 (#1887) Bumps [aws-cdk-lib](https://github.com/aws/aws-cdk) from 2.62.2 to 2.63.0. - [Release notes](https://github.com/aws/aws-cdk/releases) - [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.md) - [Commits](https://github.com/aws/aws-cdk/compare/v2.62.2...v2.63.0) --- updated-dependencies: - dependency-name: aws-cdk-lib dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 15 ++++++++------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 379a24bb5e5..b15102744c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -131,22 +131,22 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.62.2" +version = "2.63.0" description = "Version 2 of the AWS Cloud Development Kit library" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.62.2.tar.gz", hash = "sha256:f85000438d849a0522ffcd8e7cb5a70be5fa34339082d4d569734169c6d37b4d"}, - {file = "aws_cdk_lib-2.62.2-py3-none-any.whl", hash = "sha256:03dfb8303b00333177b18e3b60c95d738adf4d90086a5b1e707e896fdc234d52"}, + {file = "aws-cdk-lib-2.63.0.tar.gz", hash = "sha256:19cdc076967d502c9e75d7af960b857782c3cf5b45663acb442660a3912384a8"}, + {file = "aws_cdk_lib-2.63.0-py3-none-any.whl", hash = "sha256:e0861042302a6ebe649403e8f0249273da21852a06116c4022c73a41fa0c8c5a"}, ] [package.dependencies] -"aws-cdk.asset-awscli-v1" = ">=2.2.49,<3.0.0" +"aws-cdk.asset-awscli-v1" = 
">=2.2.52,<3.0.0" "aws-cdk.asset-kubectl-v20" = ">=2.1.1,<3.0.0" -"aws-cdk.asset-node-proxy-agent-v5" = ">=2.0.38,<3.0.0" +"aws-cdk.asset-node-proxy-agent-v5" = ">=2.0.42,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.73.0,<2.0.0" +jsii = ">=1.74.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -1219,6 +1219,7 @@ category = "dev" optional = false python-versions = "*" files = [ + {file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"}, {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, ] @@ -2831,4 +2832,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "b897ddb6a5d83dd5acce3c612c912af40b6c6c7821abc4804a93037e2f26639b" +content-hash = "389c90399ca16ea4b5f3f08b7208eb472d20fb8e0d479c22083d16a6ad0879d6" diff --git a/pyproject.toml b/pyproject.toml index da3333be635..7c9825dcc06 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" retry = "^0.9.2" pytest-xdist = "^3.1.0" -aws-cdk-lib = "^2.62.2" +aws-cdk-lib = "^2.63.0" "aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-authorizers-alpha" = "^2.38.1-alpha.0" From eeb627c01daca31560baaa67ae039ab33c5dd4af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Feb 2023 09:41:01 +0100 Subject: [PATCH 04/35] chore(deps-dev): bump black from 22.12.0 to 23.1.0 (#1886) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 44 +++++++++++++++++++++++++++++--------------- pyproject.toml | 2 +- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/poetry.lock b/poetry.lock index b15102744c6..e0e1751fc63 100644 --- a/poetry.lock +++ 
b/poetry.lock @@ -228,32 +228,46 @@ yaml = ["PyYAML"] [[package]] name = "black" -version = "22.12.0" +version = "23.1.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = 
"sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} 
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} @@ -2832,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "389c90399ca16ea4b5f3f08b7208eb472d20fb8e0d479c22083d16a6ad0879d6" +content-hash = "0a54196c4e2610ac7a79d60101f49ecd2b948421ecfeac19e9098a909278ccac" diff --git a/pyproject.toml b/pyproject.toml index 7c9825dcc06..807b61cafbc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ typing-extensions = "^4.4.0" [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^7.1"} pytest = "^7.2.1" -black = "^22.12" +black = "^23.1" boto3 = "^1.18" flake8 = [ # https://github.com/python/importlib_metadata/issues/406 From 52a93d27f4153959bc1235928ebb5bf534f06ab5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Feb 2023 20:12:12 +0000 Subject: [PATCH 05/35] chore(deps-dev): bump mypy-boto3-s3 from 1.26.58 to 1.26.62 (#1889) Bumps [mypy-boto3-s3](https://github.com/youtype/mypy_boto3_builder) from 1.26.58 to 1.26.62. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: mypy-boto3-s3 dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index e0e1751fc63..9be97c5f9d3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1648,14 +1648,14 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-s3" -version = "1.26.58" -description = "Type annotations for boto3.S3 1.26.58 service generated with mypy-boto3-builder 7.12.3" +version = "1.26.62" +description = "Type annotations for boto3.S3 1.26.62 service generated with mypy-boto3-builder 7.12.3" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.26.58.tar.gz", hash = "sha256:02aa9514877147da996ea62e9d3d326d6b33f46c9c20b26f1e45fd125ba03518"}, - {file = "mypy_boto3_s3-1.26.58-py3-none-any.whl", hash = "sha256:34989e04ae85ae5e7b653ea28359e9caad12bdd7eac68071e2166409e5e39e66"}, + {file = "mypy-boto3-s3-1.26.62.tar.gz", hash = "sha256:a817fff28fee6a56d896410ae0a6d848ffe435480ae37ac1c481940d960ecfe5"}, + {file = "mypy_boto3_s3-1.26.62-py3-none-any.whl", hash = "sha256:43eb37eb7a0e8d88f7e99f4906ce3191bdfaa7baba72011b555105248ba98677"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "0a54196c4e2610ac7a79d60101f49ecd2b948421ecfeac19e9098a909278ccac" +content-hash = "479516d731d3512866f35e0c86750b9a3dbb0d681d5bd4e7a49d3bd48a252e26" diff --git a/pyproject.toml b/pyproject.toml index 807b61cafbc..695d0abe3fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,7 +72,7 @@ mypy-boto3-lambda = "^1.26.55" mypy-boto3-logs = "^1.26.53" mypy-boto3-secretsmanager = "^1.26.49" mypy-boto3-ssm = "^1.26.43" -mypy-boto3-s3 = "^1.26.58" +mypy-boto3-s3 = "^1.26.62" mypy-boto3-xray = "^1.26.11" types-requests = "^2.28.11" typing-extensions = "^4.4.0" From 
bf2558aabf3f079d2bd9956e1ed913b2d7934941 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Feb 2023 21:53:31 +0100 Subject: [PATCH 06/35] chore(deps-dev): bump mkdocs-material from 9.0.9 to 9.0.10 (#1888) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9be97c5f9d3..f0f8a3fffb5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1463,14 +1463,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.0.9" +version = "9.0.10" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.0.9-py3-none-any.whl", hash = "sha256:29ebb2aa81e8cfb39e497bdafcacf4cc4aaa20ae31ce334f520c317c5bead1ba"}, - {file = "mkdocs_material-9.0.9.tar.gz", hash = "sha256:c8fa9b1f0fded744a42317e594e5c21f4e3b56f1a0497e7d16951b3bd47784bf"}, + {file = "mkdocs_material-9.0.10-py3-none-any.whl", hash = "sha256:0848611d6dff21d5b4cacaf490606d075cd03b49f3c49809c8aa2e45b1f5216c"}, + {file = "mkdocs_material-9.0.10.tar.gz", hash = "sha256:d0f9734ec58b1d0e3e67c1b1941c60a40067b5e708430b0effa3bc3c323b4dd1"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "479516d731d3512866f35e0c86750b9a3dbb0d681d5bd4e7a49d3bd48a252e26" +content-hash = "e0e8b35f08d15b5956dddc68a5ddbcea4d68b8694f705d2720acd60fe99f7aca" diff --git a/pyproject.toml b/pyproject.toml index 695d0abe3fd..c33f2562b57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ mypy-boto3-s3 = "^1.26.62" mypy-boto3-xray = "^1.26.11" types-requests = "^2.28.11" typing-extensions = "^4.4.0" -mkdocs-material = "^9.0.8" +mkdocs-material = "^9.0.10" filelock = "^3.9.0" checksumdir = "^1.2.0" 
mypy-boto3-appconfigdata = "^1.26.0" From 42cf4495fbf8b255e7ef09d790ec2d57341f431e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luis=20Vald=C3=A9s?= Date: Fri, 3 Feb 2023 05:08:31 -0300 Subject: [PATCH 07/35] docs(homepage): set url for end-of-support in announce block (#1893) --- docs/overrides/main.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/overrides/main.html b/docs/overrides/main.html index 10b02d3a905..7f4f2000d9a 100644 --- a/docs/overrides/main.html +++ b/docs/overrides/main.html @@ -2,7 +2,7 @@ {% block announce %} 👋 Powertools for Python v1 will no longer receive updates or releases after 31/03/2023! -We encourage you to read our upgrade guide on how to migrate to v2. +We encourage you to read our upgrade guide on how to migrate to v2. {% endblock %} {% block outdated %} From ba6723ba1dc465b0b2dec958a92ef927b13bf0e1 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 3 Feb 2023 08:08:53 +0000 Subject: [PATCH 08/35] update changelog with latest changes --- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c075598044c..2819fcb71c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,10 +4,21 @@ # Unreleased +## Documentation + +* **homepage:** set url for end-of-support in announce block ([#1893](https://github.com/awslabs/aws-lambda-powertools-python/issues/1893)) + ## Features * **metrics:** add default_dimensions to single_metric ([#1880](https://github.com/awslabs/aws-lambda-powertools-python/issues/1880)) +## Maintenance + +* **deps-dev:** bump mkdocs-material from 9.0.9 to 9.0.10 ([#1888](https://github.com/awslabs/aws-lambda-powertools-python/issues/1888)) +* **deps-dev:** bump mypy-boto3-s3 from 1.26.58 to 1.26.62 ([#1889](https://github.com/awslabs/aws-lambda-powertools-python/issues/1889)) +* **deps-dev:** bump black from 22.12.0 to 23.1.0 ([#1886](https://github.com/awslabs/aws-lambda-powertools-python/issues/1886)) +* **deps-dev:** bump aws-cdk-lib from 
2.62.2 to 2.63.0 ([#1887](https://github.com/awslabs/aws-lambda-powertools-python/issues/1887)) + ## [v2.7.1] - 2023-02-01 From e2f9f6b0ef73f6cf481d4c9bb0a8df104cddcbdf Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 3 Feb 2023 12:44:16 +0100 Subject: [PATCH 09/35] chore(maintainers): fix release workflow rename Release workflow file was renamed and broke a direct link to access `Publishing workflow`. Signed-off-by: Heitor Lessa --- MAINTAINERS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index c4907cdf57f..90e41eb6345 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -218,7 +218,7 @@ These are some questions to keep in mind when drafting your first or future rele Once you're happy, hit `Publish release` 🎉🎉🎉. -This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/on_release_notes.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be closed and notified. +This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/release.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be closed and notified. > TODO: Include information to verify SAR and Lambda Layers deployment; we're still finalizing Lambda Layer automated deployment in GitHub Actions - ping @am29d when in doubt. From 3e61ca1cc2f795b014bb984a0437d8db34714ae4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 3 Feb 2023 20:15:53 +0000 Subject: [PATCH 10/35] chore(deps-dev): bump mypy-boto3-appconfig from 1.26.0.post1 to 1.26.63 (#1895) Bumps [mypy-boto3-appconfig](https://github.com/youtype/mypy_boto3_builder) from 1.26.0.post1 to 1.26.63. 
- [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: mypy-boto3-appconfig dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index f0f8a3fffb5..9e3bb9f0c52 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1543,14 +1543,14 @@ reports = ["lxml"] [[package]] name = "mypy-boto3-appconfig" -version = "1.26.0.post1" -description = "Type annotations for boto3.AppConfig 1.26.0 service generated with mypy-boto3-builder 7.11.10" +version = "1.26.63" +description = "Type annotations for boto3.AppConfig 1.26.63 service generated with mypy-boto3-builder 7.12.3" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-appconfig-1.26.0.post1.tar.gz", hash = "sha256:eb1473f3e3f6b78f1a1a747a1373dd2e574db016a6b9bc5259f721c3482f2e56"}, - {file = "mypy_boto3_appconfig-1.26.0.post1-py3-none-any.whl", hash = "sha256:749daf4ed2494a899ccfee1e9f564985fb163ede9c82778847276604a6783c15"}, + {file = "mypy-boto3-appconfig-1.26.63.tar.gz", hash = "sha256:d9dee4b5cdeb9148772fd14f96c480f56bedba688debe8e963fe1274ae130925"}, + {file = "mypy_boto3_appconfig-1.26.63-py3-none-any.whl", hash = "sha256:c329feca6bc8b2647323662161c98816485a223e5a48ace66ab74929bdfce268"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "e0e8b35f08d15b5956dddc68a5ddbcea4d68b8694f705d2720acd60fe99f7aca" +content-hash = "cc9e288ee2524a57548c3ce0d43af26fef4ea38657ae0e3aa656079f68d834a5" diff --git a/pyproject.toml b/pyproject.toml index c33f2562b57..306c31c7660 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ aws-cdk-lib = "^2.63.0" "aws-cdk.aws-apigatewayv2-authorizers-alpha" = "^2.38.1-alpha.0" pytest-benchmark = "^4.0.0" python-snappy = "^0.6.1" -mypy-boto3-appconfig = "^1.26.0" +mypy-boto3-appconfig = "^1.26.63" mypy-boto3-cloudformation = "^1.26.57" mypy-boto3-cloudwatch = "^1.26.52" mypy-boto3-dynamodb = "^1.26.24" From aca5ff1b1aeb26830facac682553045ddfdd85d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 3 Feb 2023 21:30:08 +0100 Subject: [PATCH 11/35] chore(deps-dev): bump mkdocs-material from 9.0.10 to 9.0.11 (#1896) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9e3bb9f0c52..dc81fdcc57a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1463,14 +1463,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.0.10" +version = "9.0.11" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.0.10-py3-none-any.whl", hash = "sha256:0848611d6dff21d5b4cacaf490606d075cd03b49f3c49809c8aa2e45b1f5216c"}, - {file = "mkdocs_material-9.0.10.tar.gz", hash = "sha256:d0f9734ec58b1d0e3e67c1b1941c60a40067b5e708430b0effa3bc3c323b4dd1"}, + {file = "mkdocs_material-9.0.11-py3-none-any.whl", hash = "sha256:90a1e1ed41e90de5d0ab97c874b7bf6af488d0faf4aaea8e5868e01f3f1ed923"}, + {file = "mkdocs_material-9.0.11.tar.gz", hash = "sha256:aff49e4ce622a107ed563b3a6a37dc3660a45a0e4d9e7d4d2c13ce9dc02a7faf"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "cc9e288ee2524a57548c3ce0d43af26fef4ea38657ae0e3aa656079f68d834a5" +content-hash = 
"03dbf25de6101f231c2207d34de3e96be89ec31e9f2c29cadc2e041f94801883" diff --git a/pyproject.toml b/pyproject.toml index 306c31c7660..9f4eba87d14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ mypy-boto3-s3 = "^1.26.62" mypy-boto3-xray = "^1.26.11" types-requests = "^2.28.11" typing-extensions = "^4.4.0" -mkdocs-material = "^9.0.10" +mkdocs-material = "^9.0.11" filelock = "^3.9.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.0" From 173f775b3f8e64f549f8a91c32d11c45d684898e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 6 Feb 2023 16:50:07 +0000 Subject: [PATCH 12/35] docs(idempotency): add IAM permissions section (#1902) --- docs/utilities/idempotency.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 89cd3003b77..675d0c4f631 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -26,6 +26,13 @@ times with the same parameters**. This makes idempotent operations safe to retry ## Getting started +### IAM Permissions + +Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, `dynamodb:UpdateItem` and `dynamodb:DeleteItem` IAM permissions before using this feature. + +???+ note + If you're using our example [AWS Serverless Application Model (SAM)](#required-resources), it already adds the required permissions. + ### Required resources Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. 
From 3bf9c225364742dd6b687cadfe70b4e6597079de Mon Sep 17 00:00:00 2001 From: Release bot Date: Mon, 6 Feb 2023 16:50:29 +0000 Subject: [PATCH 13/35] update changelog with latest changes --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2819fcb71c4..191674dc63c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ ## Documentation * **homepage:** set url for end-of-support in announce block ([#1893](https://github.com/awslabs/aws-lambda-powertools-python/issues/1893)) +* **idempotency:** add IAM permissions section ([#1902](https://github.com/awslabs/aws-lambda-powertools-python/issues/1902)) ## Features @@ -14,10 +15,13 @@ ## Maintenance +* **deps-dev:** bump mkdocs-material from 9.0.10 to 9.0.11 ([#1896](https://github.com/awslabs/aws-lambda-powertools-python/issues/1896)) +* **deps-dev:** bump mypy-boto3-appconfig from 1.26.0.post1 to 1.26.63 ([#1895](https://github.com/awslabs/aws-lambda-powertools-python/issues/1895)) * **deps-dev:** bump mkdocs-material from 9.0.9 to 9.0.10 ([#1888](https://github.com/awslabs/aws-lambda-powertools-python/issues/1888)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.58 to 1.26.62 ([#1889](https://github.com/awslabs/aws-lambda-powertools-python/issues/1889)) * **deps-dev:** bump black from 22.12.0 to 23.1.0 ([#1886](https://github.com/awslabs/aws-lambda-powertools-python/issues/1886)) * **deps-dev:** bump aws-cdk-lib from 2.62.2 to 2.63.0 ([#1887](https://github.com/awslabs/aws-lambda-powertools-python/issues/1887)) +* **maintainers:** fix release workflow rename From bf4bf669c1900fd41b62902e044452bb090985f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Feb 2023 20:13:20 +0000 Subject: [PATCH 14/35] chore(deps-dev): bump aws-cdk-lib from 2.63.0 to 2.63.2 (#1904) Bumps [aws-cdk-lib](https://github.com/aws/aws-cdk) from 2.63.0 to 2.63.2. 
- [Release notes](https://github.com/aws/aws-cdk/releases) - [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.md) - [Commits](https://github.com/aws/aws-cdk/compare/v2.63.0...v2.63.2) --- updated-dependencies: - dependency-name: aws-cdk-lib dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index dc81fdcc57a..f506a453d77 100644 --- a/poetry.lock +++ b/poetry.lock @@ -131,14 +131,14 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.63.0" +version = "2.63.2" description = "Version 2 of the AWS Cloud Development Kit library" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.63.0.tar.gz", hash = "sha256:19cdc076967d502c9e75d7af960b857782c3cf5b45663acb442660a3912384a8"}, - {file = "aws_cdk_lib-2.63.0-py3-none-any.whl", hash = "sha256:e0861042302a6ebe649403e8f0249273da21852a06116c4022c73a41fa0c8c5a"}, + {file = "aws-cdk-lib-2.63.2.tar.gz", hash = "sha256:d84a352e817992727f0519240cf51c911fd4e30bc4d166ecb01552ac93eed795"}, + {file = "aws_cdk_lib-2.63.2-py3-none-any.whl", hash = "sha256:07e3151fbf962e6fe8c5e7c73701e810bc8fe927ab6391370510ee3a0665562f"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "03dbf25de6101f231c2207d34de3e96be89ec31e9f2c29cadc2e041f94801883" +content-hash = "2ac01d684eb8f7a907439654cb41a3a2e86ba8d63a0b6cacb7255cbef99b3c1e" diff --git a/pyproject.toml b/pyproject.toml index 9f4eba87d14..cd112f18721 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" retry = "^0.9.2" pytest-xdist = "^3.1.0" -aws-cdk-lib = 
"^2.63.0" +aws-cdk-lib = "^2.63.2" "aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-authorizers-alpha" = "^2.38.1-alpha.0" From e2bfae04b6ef3fd81161d723cf4b2ca99ad2c5d0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Feb 2023 20:14:35 +0000 Subject: [PATCH 15/35] chore(deps): bump docker/setup-buildx-action from 2.4.0 to 2.4.1 (#1903) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/publish_v2_layer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml index b3e6f143dc5..24b23a4f53a 100644 --- a/.github/workflows/publish_v2_layer.yml +++ b/.github/workflows/publish_v2_layer.yml @@ -65,7 +65,7 @@ jobs: # NOTE: we need QEMU to build Layer against a different architecture (e.g., ARM) - name: Set up Docker Buildx id: builder - uses: docker/setup-buildx-action@15c905b16b06416d2086efa066dd8e3a35cc7f98 # v2.4.0 + uses: docker/setup-buildx-action@f03ac48505955848960e80bbb68046aa35c7b9e7 # v2.4.1 with: install: true driver: docker From 279c5a9605fab15115c693b3273eca45bf3234f1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 13:06:53 +0000 Subject: [PATCH 16/35] chore(deps-dev): bump pytest-xdist from 3.1.0 to 3.2.0 (#1905) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index f506a453d77..5c238c72494 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2159,14 +2159,14 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" -version = "3.1.0" +version = "3.2.0" description = "pytest xdist plugin for 
distributed testing, most importantly across multiple CPUs" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.1.0.tar.gz", hash = "sha256:40fdb8f3544921c5dfcd486ac080ce22870e71d82ced6d2e78fa97c2addd480c"}, - {file = "pytest_xdist-3.1.0-py3-none-any.whl", hash = "sha256:70a76f191d8a1d2d6be69fc440cdf85f3e4c03c08b520fd5dc5d338d6cf07d89"}, + {file = "pytest-xdist-3.2.0.tar.gz", hash = "sha256:fa10f95a2564cd91652f2d132725183c3b590d9fdcdec09d3677386ecf4c1ce9"}, + {file = "pytest_xdist-3.2.0-py3-none-any.whl", hash = "sha256:336098e3bbd8193276867cc87db8b22903c3927665dff9d1ac8684c02f597b68"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "2ac01d684eb8f7a907439654cb41a3a2e86ba8d63a0b6cacb7255cbef99b3c1e" +content-hash = "6e8d5f7600adef0c3be0c13c848449ca968f8261fd9aad42f5378c3e3cecbcda" diff --git a/pyproject.toml b/pyproject.toml index cd112f18721..898f8acfd3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ flake8-bugbear = "^23.1.20" mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" retry = "^0.9.2" -pytest-xdist = "^3.1.0" +pytest-xdist = "^3.2.0" aws-cdk-lib = "^2.63.2" "aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" From 92a526d219e8ceea2960939d8e7aa0171a51ba72 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 13:07:26 +0000 Subject: [PATCH 17/35] chore(deps-dev): bump types-requests from 2.28.11.8 to 2.28.11.12 (#1906) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5c238c72494..fdb322a4e34 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2619,14 +2619,14 @@ files = [ [[package]] name = 
"types-requests" -version = "2.28.11.8" +version = "2.28.11.12" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.28.11.8.tar.gz", hash = "sha256:e67424525f84adfbeab7268a159d3c633862dafae15c5b19547ce1b55954f0a3"}, - {file = "types_requests-2.28.11.8-py3-none-any.whl", hash = "sha256:61960554baca0008ae7e2db2bd3b322ca9a144d3e80ce270f5fb640817e40994"}, + {file = "types-requests-2.28.11.12.tar.gz", hash = "sha256:fd530aab3fc4f05ee36406af168f0836e6f00f1ee51a0b96b7311f82cb675230"}, + {file = "types_requests-2.28.11.12-py3-none-any.whl", hash = "sha256:dbc2933635860e553ffc59f5e264264981358baffe6342b925e3eb8261f866ee"}, ] [package.dependencies] From 66a39e564a339893a31aacd9aa653df4a323e4fa Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Thu, 9 Feb 2023 09:18:37 +0000 Subject: [PATCH 18/35] chore(pypi): add new links to Pypi package homepage (#1912) * maintenance(pypi): updating pyproject.toml * maintenance(pypi): updating pyproject.toml - fix --- pyproject.toml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 898f8acfd3a..983e05e200f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,13 +13,18 @@ classifiers=[ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", ] -repository="https://github.com/awslabs/aws-lambda-powertools-python" +repository = "https://github.com/awslabs/aws-lambda-powertools-python" +documentation = "https://awslabs.github.io/aws-lambda-powertools-python/" readme = "README.md" keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "powertools", "feature_flags", "idempotency", "middleware"] # MIT-0 is not recognized as an existing license from poetry. # By using `MIT` as a license value, a `License :: OSI Approved :: MIT License` classifier is added to the classifiers list. 
license = "MIT" +[tool.poetry.urls] +"Issue tracker" = "https://github.com/awslabs/aws-lambda-powertools-python/issues" +"Releases" = "https://github.com/awslabs/aws-lambda-powertools-python/releases" + [tool.poetry.dependencies] python = "^3.7.4" aws-xray-sdk = { version = "^2.8.0", optional = true } From cca99baf8282b73aa2013583cbe7fdcff81ab824 Mon Sep 17 00:00:00 2001 From: mploski Date: Thu, 9 Feb 2023 10:21:37 +0100 Subject: [PATCH 19/35] fix(idempotency): make idempotent_function decorator thread safe (#1899) * update changelog with latest changes * Add low level dynamodb client * Fix black and mypy warnings * Fix test error + add missing pytest groupings * Simplify parallel function handler * Rename e2e tests * fix(idempotency): updating documentation --------- Co-authored-by: Release bot Co-authored-by: Michal Ploski Co-authored-by: Leandro Damascena --- .../event_handler/api_gateway.py | 1 - aws_lambda_powertools/logging/utils.py | 1 - .../utilities/feature_flags/schema.py | 1 - .../utilities/idempotency/persistence/base.py | 2 +- .../idempotency/persistence/dynamodb.py | 96 +++++++++---------- docs/utilities/idempotency.md | 6 +- mypy.ini | 5 + .../function_thread_safety_handler.py | 29 ++++++ .../handlers/parallel_execution_handler.py | 1 - .../handlers/ttl_cache_expiration_handler.py | 1 - .../handlers/ttl_cache_timeout_handler.py | 1 - tests/e2e/idempotency/infrastructure.py | 1 + .../idempotency/test_idempotency_dynamodb.py | 34 +++++++ tests/e2e/parameters/infrastructure.py | 2 - .../feature_flags/test_feature_flags.py | 2 + tests/functional/idempotency/conftest.py | 48 ++++++---- .../idempotency/test_idempotency.py | 71 +++++++------- tests/functional/idempotency/utils.py | 22 +++-- 18 files changed, 195 insertions(+), 129 deletions(-) create mode 100644 tests/e2e/idempotency/handlers/function_thread_safety_handler.py diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 
7b4001c7265..a44d85455fe 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -836,7 +836,6 @@ def route( # Override _compile_regex to exclude trailing slashes for route resolution @staticmethod def _compile_regex(rule: str, base_regex: str = _ROUTE_REGEX): - return super(APIGatewayRestResolver, APIGatewayRestResolver)._compile_regex(rule, "^{}/*$") diff --git a/aws_lambda_powertools/logging/utils.py b/aws_lambda_powertools/logging/utils.py index 05ac6d5001b..5cd8093073a 100644 --- a/aws_lambda_powertools/logging/utils.py +++ b/aws_lambda_powertools/logging/utils.py @@ -12,7 +12,6 @@ def copy_config_to_registered_loggers( exclude: Optional[Set[str]] = None, include: Optional[Set[str]] = None, ) -> None: - """Copies source Logger level and handler to all registered loggers for consistent formatting. Parameters diff --git a/aws_lambda_powertools/utilities/feature_flags/schema.py b/aws_lambda_powertools/utilities/feature_flags/schema.py index 48a1eb77129..2fb690301c6 100644 --- a/aws_lambda_powertools/utilities/feature_flags/schema.py +++ b/aws_lambda_powertools/utilities/feature_flags/schema.py @@ -272,7 +272,6 @@ def __init__(self, rule: Dict[str, Any], rule_name: str, logger: Optional[Union[ self.logger = logger or logging.getLogger(__name__) def validate(self): - if not self.conditions or not isinstance(self.conditions, list): self.logger.debug(f"Condition is empty or invalid for rule={self.rule_name}") raise SchemaValidationError(f"Invalid condition, rule={self.rule_name}") diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index a87980d7fe0..b3504dfeacd 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -37,7 +37,7 @@ class DataRecord: def __init__( self, - idempotency_key, + idempotency_key: str, status: 
str = "", expiry_timestamp: Optional[int] = None, in_progress_expiry_timestamp: Optional[int] = None, diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index 5d4d999ae1d..b05d8216b50 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -4,7 +4,9 @@ from typing import Any, Dict, Optional import boto3 +from boto3.dynamodb.types import TypeDeserializer from botocore.config import Config +from botocore.exceptions import ClientError from aws_lambda_powertools.shared import constants from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer @@ -79,13 +81,14 @@ def __init__( self._boto_config = boto_config or Config() self._boto3_session = boto3_session or boto3.session.Session() + self._client = self._boto3_session.client("dynamodb", config=self._boto_config) + if sort_key_attr == key_attr: raise ValueError(f"key_attr [{key_attr}] and sort_key_attr [{sort_key_attr}] cannot be the same!") if static_pk_value is None: static_pk_value = f"idempotency#{os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, '')}" - self._table = None self.table_name = table_name self.key_attr = key_attr self.static_pk_value = static_pk_value @@ -95,31 +98,15 @@ def __init__( self.status_attr = status_attr self.data_attr = data_attr self.validation_key_attr = validation_key_attr - super(DynamoDBPersistenceLayer, self).__init__() - @property - def table(self): - """ - Caching property to store boto3 dynamodb Table resource + self._deserializer = TypeDeserializer() - """ - if self._table: - return self._table - ddb_resource = self._boto3_session.resource("dynamodb", config=self._boto_config) - self._table = ddb_resource.Table(self.table_name) - return self._table - - @table.setter - def table(self, table): - """ - Allow table instance variable to be set directly, primarily for use in 
tests - """ - self._table = table + super(DynamoDBPersistenceLayer, self).__init__() def _get_key(self, idempotency_key: str) -> dict: if self.sort_key_attr: - return {self.key_attr: self.static_pk_value, self.sort_key_attr: idempotency_key} - return {self.key_attr: idempotency_key} + return {self.key_attr: {"S": self.static_pk_value}, self.sort_key_attr: {"S": idempotency_key}} + return {self.key_attr: {"S": idempotency_key}} def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: """ @@ -136,36 +123,39 @@ def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: representation of item """ + data = self._deserializer.deserialize({"M": item}) return DataRecord( - idempotency_key=item[self.key_attr], - status=item[self.status_attr], - expiry_timestamp=item[self.expiry_attr], - in_progress_expiry_timestamp=item.get(self.in_progress_expiry_attr), - response_data=item.get(self.data_attr), - payload_hash=item.get(self.validation_key_attr), + idempotency_key=data[self.key_attr], + status=data[self.status_attr], + expiry_timestamp=data[self.expiry_attr], + in_progress_expiry_timestamp=data.get(self.in_progress_expiry_attr), + response_data=data.get(self.data_attr), + payload_hash=data.get(self.validation_key_attr), ) def _get_record(self, idempotency_key) -> DataRecord: - response = self.table.get_item(Key=self._get_key(idempotency_key), ConsistentRead=True) - + response = self._client.get_item( + TableName=self.table_name, Key=self._get_key(idempotency_key), ConsistentRead=True + ) try: item = response["Item"] - except KeyError: - raise IdempotencyItemNotFoundError + except KeyError as exc: + raise IdempotencyItemNotFoundError from exc return self._item_to_data_record(item) def _put_record(self, data_record: DataRecord) -> None: item = { **self._get_key(data_record.idempotency_key), - self.expiry_attr: data_record.expiry_timestamp, - self.status_attr: data_record.status, + self.key_attr: {"S": data_record.idempotency_key}, + self.expiry_attr: {"N": 
str(data_record.expiry_timestamp)}, + self.status_attr: {"S": data_record.status}, } if data_record.in_progress_expiry_timestamp is not None: - item[self.in_progress_expiry_attr] = data_record.in_progress_expiry_timestamp + item[self.in_progress_expiry_attr] = {"N": str(data_record.in_progress_expiry_timestamp)} - if self.payload_validation_enabled: - item[self.validation_key_attr] = data_record.payload_hash + if self.payload_validation_enabled and data_record.payload_hash: + item[self.validation_key_attr] = {"S": data_record.payload_hash} now = datetime.datetime.now() try: @@ -199,8 +189,8 @@ def _put_record(self, data_record: DataRecord) -> None: condition_expression = ( f"{idempotency_key_not_exist} OR {idempotency_expiry_expired} OR ({inprogress_expiry_expired})" ) - - self.table.put_item( + self._client.put_item( + TableName=self.table_name, Item=item, ConditionExpression=condition_expression, ExpressionAttributeNames={ @@ -210,22 +200,28 @@ def _put_record(self, data_record: DataRecord) -> None: "#status": self.status_attr, }, ExpressionAttributeValues={ - ":now": int(now.timestamp()), - ":now_in_millis": int(now.timestamp() * 1000), - ":inprogress": STATUS_CONSTANTS["INPROGRESS"], + ":now": {"N": str(int(now.timestamp()))}, + ":now_in_millis": {"N": str(int(now.timestamp() * 1000))}, + ":inprogress": {"S": STATUS_CONSTANTS["INPROGRESS"]}, }, ) - except self.table.meta.client.exceptions.ConditionalCheckFailedException: - logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") - raise IdempotencyItemAlreadyExistsError + except ClientError as exc: + error_code = exc.response.get("Error", {}).get("Code") + if error_code == "ConditionalCheckFailedException": + logger.debug( + f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}" + ) + raise IdempotencyItemAlreadyExistsError from exc + else: + raise def _update_record(self, data_record: DataRecord): logger.debug(f"Updating 
record for idempotency key: {data_record.idempotency_key}") update_expression = "SET #response_data = :response_data, #expiry = :expiry, " "#status = :status" expression_attr_values = { - ":expiry": data_record.expiry_timestamp, - ":response_data": data_record.response_data, - ":status": data_record.status, + ":expiry": {"N": str(data_record.expiry_timestamp)}, + ":response_data": {"S": data_record.response_data}, + ":status": {"S": data_record.status}, } expression_attr_names = { "#expiry": self.expiry_attr, @@ -235,7 +231,7 @@ def _update_record(self, data_record: DataRecord): if self.payload_validation_enabled: update_expression += ", #validation_key = :validation_key" - expression_attr_values[":validation_key"] = data_record.payload_hash + expression_attr_values[":validation_key"] = {"S": data_record.payload_hash} expression_attr_names["#validation_key"] = self.validation_key_attr kwargs = { @@ -245,8 +241,8 @@ def _update_record(self, data_record: DataRecord): "ExpressionAttributeNames": expression_attr_names, } - self.table.update_item(**kwargs) + self._client.update_item(TableName=self.table_name, **kwargs) def _delete_record(self, data_record: DataRecord) -> None: logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") - self.table.delete_item(Key=self._get_key(data_record.idempotency_key)) + self._client.delete_item(TableName=self.table_name, Key={**self._get_key(data_record.idempotency_key)}) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 675d0c4f631..49a028168b3 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -132,13 +132,9 @@ When using `idempotent_function`, you must tell us which keyword parameter in yo !!! 
info "We support JSON serializable data, [Python Dataclasses](https://docs.python.org/3.7/library/dataclasses.html){target="_blank"}, [Parser/Pydantic Models](parser.md){target="_blank"}, and our [Event Source Data Classes](./data_classes.md){target="_blank"}." -???+ warning "Limitations" +???+ warning "Limitation" Make sure to call your decorated function using keyword arguments. - Decorated functions with `idempotent_function` are not thread-safe, if the caller uses threading, not the function computation itself. - - DynamoDB Persistency layer uses a Resource client [which is not thread-safe](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/resources.html?highlight=multithreading#multithreading-or-multiprocessing-with-resources){target="_blank"}. - === "dataclass_sample.py" ```python hl_lines="3-4 23 33" diff --git a/mypy.ini b/mypy.ini index fd14881dfb1..d55936b702b 100644 --- a/mypy.ini +++ b/mypy.ini @@ -29,6 +29,9 @@ ignore_missing_imports = True [mypy-boto3.dynamodb.conditions] ignore_missing_imports = True +[mypy-boto3.dynamodb.types] +ignore_missing_imports = True + [mypy-botocore.config] ignore_missing_imports = True @@ -58,3 +61,5 @@ ignore_missing_imports = True [mypy-ijson] ignore_missing_imports = True + + diff --git a/tests/e2e/idempotency/handlers/function_thread_safety_handler.py b/tests/e2e/idempotency/handlers/function_thread_safety_handler.py new file mode 100644 index 00000000000..6e23759b29e --- /dev/null +++ b/tests/e2e/idempotency/handlers/function_thread_safety_handler.py @@ -0,0 +1,29 @@ +import os +import time +from concurrent.futures import ThreadPoolExecutor, as_completed +from threading import current_thread + +from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, + idempotent_function, +) + +TABLE_NAME = os.getenv("IdempotencyTable", "") +persistence_layer = DynamoDBPersistenceLayer(table_name=TABLE_NAME) +threads_count = 2 + + +@idempotent_function(persistence_store=persistence_layer, 
data_keyword_argument="record") +def record_handler(record): + time_now = time.time() + return {"thread_name": current_thread().name, "time": str(time_now)} + + +def lambda_handler(event, context): + with ThreadPoolExecutor(max_workers=threads_count) as executor: + futures = [executor.submit(record_handler, **{"record": event}) for _ in range(threads_count)] + + return [ + {"state": future._state, "exception": future.exception(), "output": future.result()} + for future in as_completed(futures) + ] diff --git a/tests/e2e/idempotency/handlers/parallel_execution_handler.py b/tests/e2e/idempotency/handlers/parallel_execution_handler.py index 6dcb012d858..0ccb00a3bec 100644 --- a/tests/e2e/idempotency/handlers/parallel_execution_handler.py +++ b/tests/e2e/idempotency/handlers/parallel_execution_handler.py @@ -12,7 +12,6 @@ @idempotent(persistence_store=persistence_layer) def lambda_handler(event, context): - time.sleep(5) return event diff --git a/tests/e2e/idempotency/handlers/ttl_cache_expiration_handler.py b/tests/e2e/idempotency/handlers/ttl_cache_expiration_handler.py index 4cd71045dc0..a9bf4fb2b64 100644 --- a/tests/e2e/idempotency/handlers/ttl_cache_expiration_handler.py +++ b/tests/e2e/idempotency/handlers/ttl_cache_expiration_handler.py @@ -14,7 +14,6 @@ @idempotent(config=config, persistence_store=persistence_layer) def lambda_handler(event, context): - time_now = time.time() return {"time": str(time_now)} diff --git a/tests/e2e/idempotency/handlers/ttl_cache_timeout_handler.py b/tests/e2e/idempotency/handlers/ttl_cache_timeout_handler.py index 99be7b63391..ad1a51b495d 100644 --- a/tests/e2e/idempotency/handlers/ttl_cache_timeout_handler.py +++ b/tests/e2e/idempotency/handlers/ttl_cache_timeout_handler.py @@ -14,7 +14,6 @@ @idempotent(config=config, persistence_store=persistence_layer) def lambda_handler(event, context): - sleep_time: int = event.get("sleep") or 0 time.sleep(sleep_time) diff --git a/tests/e2e/idempotency/infrastructure.py 
b/tests/e2e/idempotency/infrastructure.py index abe69f6a5e6..00d3761b829 100644 --- a/tests/e2e/idempotency/infrastructure.py +++ b/tests/e2e/idempotency/infrastructure.py @@ -15,6 +15,7 @@ def create_resources(self): table.grant_read_write_data(functions["TtlCacheExpirationHandler"]) table.grant_read_write_data(functions["TtlCacheTimeoutHandler"]) table.grant_read_write_data(functions["ParallelExecutionHandler"]) + table.grant_read_write_data(functions["FunctionThreadSafetyHandler"]) def _create_dynamodb_table(self) -> Table: table = dynamodb.Table( diff --git a/tests/e2e/idempotency/test_idempotency_dynamodb.py b/tests/e2e/idempotency/test_idempotency_dynamodb.py index d3452a1a161..06147227549 100644 --- a/tests/e2e/idempotency/test_idempotency_dynamodb.py +++ b/tests/e2e/idempotency/test_idempotency_dynamodb.py @@ -22,6 +22,11 @@ def parallel_execution_handler_fn_arn(infrastructure: dict) -> str: return infrastructure.get("ParallelExecutionHandlerArn", "") +@pytest.fixture +def function_thread_safety_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("FunctionThreadSafetyHandlerArn", "") + + @pytest.fixture def idempotency_table_name(infrastructure: dict) -> str: return infrastructure.get("DynamoDBTable", "") @@ -97,3 +102,32 @@ def test_parallel_execution_idempotency(parallel_execution_handler_fn_arn: str): # THEN assert "Execution already in progress with idempotency key" in error_idempotency_execution_response assert "Task timed out after" in timeout_execution_response + + +@pytest.mark.xdist_group(name="idempotency") +def test_idempotent_function_thread_safety(function_thread_safety_handler_fn_arn: str): + # GIVEN + payload = json.dumps({"message": "Lambda Powertools - Idempotent function thread safety check"}) + + # WHEN + # first execution + first_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=function_thread_safety_handler_fn_arn, payload=payload + ) + first_execution_response = 
first_execution["Payload"].read().decode("utf-8") + + # the second execution should return the same response as the first execution + second_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=function_thread_safety_handler_fn_arn, payload=payload + ) + second_execution_response = second_execution["Payload"].read().decode("utf-8") + + # THEN + # Function threads finished without exception AND + # first and second execution is the same + for function_thread in json.loads(first_execution_response): + assert function_thread["state"] == "FINISHED" + assert function_thread["exception"] is None + assert function_thread["output"] is not None + + assert first_execution_response == second_execution_response diff --git a/tests/e2e/parameters/infrastructure.py b/tests/e2e/parameters/infrastructure.py index 018fceab2aa..58065ea9848 100644 --- a/tests/e2e/parameters/infrastructure.py +++ b/tests/e2e/parameters/infrastructure.py @@ -34,7 +34,6 @@ def create_resources(self): ) def _create_app_config(self, function: Function): - service_name = build_service_name() cfn_application = appconfig.CfnApplication( @@ -82,7 +81,6 @@ def _create_app_config_freeform( function: Function, service_name: str, ): - cfn_configuration_profile = appconfig.CfnConfigurationProfile( self.stack, "appconfig-profile", diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py index 416fe0be3ba..12568c750e4 100644 --- a/tests/functional/feature_flags/test_feature_flags.py +++ b/tests/functional/feature_flags/test_feature_flags.py @@ -315,6 +315,7 @@ def test_flags_conditions_rule_match_multiple_actions_multiple_rules_multiple_co # check a case where the feature exists but the rule doesn't match so we revert to the default value of the feature + # Check IN/NOT_IN/KEY_IN_VALUE/KEY_NOT_IN_VALUE/VALUE_IN_KEY/VALUE_NOT_IN_KEY conditions def test_flags_match_rule_with_in_action(mocker, config): expected_value = True @@ -775,6 +776,7 @@ def 
test_get_configuration_with_envelope_and_raw(mocker, config): ## Inequality test cases ## + # Test not equals def test_flags_not_equal_no_match(mocker, config): expected_value = False diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py index 657a4b6bd13..7e5fa0e7c61 100644 --- a/tests/functional/idempotency/conftest.py +++ b/tests/functional/idempotency/conftest.py @@ -85,11 +85,11 @@ def expected_params_update_item(serialized_lambda_response, hashed_idempotency_k "#status": "status", }, "ExpressionAttributeValues": { - ":expiry": stub.ANY, - ":response_data": serialized_lambda_response, - ":status": "COMPLETED", + ":expiry": {"N": stub.ANY}, + ":response_data": {"S": serialized_lambda_response}, + ":status": {"S": "COMPLETED"}, }, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "TableName": "TEST_TABLE", "UpdateExpression": "SET #response_data = :response_data, " "#expiry = :expiry, #status = :status", } @@ -107,12 +107,12 @@ def expected_params_update_item_with_validation( "#validation_key": "validation", }, "ExpressionAttributeValues": { - ":expiry": stub.ANY, - ":response_data": serialized_lambda_response, - ":status": "COMPLETED", - ":validation_key": hashed_validation_key, + ":expiry": {"N": stub.ANY}, + ":response_data": {"S": serialized_lambda_response}, + ":status": {"S": "COMPLETED"}, + ":validation_key": {"S": hashed_validation_key}, }, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "TableName": "TEST_TABLE", "UpdateExpression": ( "SET #response_data = :response_data, " @@ -135,12 +135,16 @@ def expected_params_put_item(hashed_idempotency_key): "#status": "status", "#in_progress_expiry": "in_progress_expiration", }, - "ExpressionAttributeValues": {":now": stub.ANY, ":now_in_millis": stub.ANY, ":inprogress": "INPROGRESS"}, + "ExpressionAttributeValues": { + ":now": {"N": stub.ANY}, + ":now_in_millis": {"N": stub.ANY}, + 
":inprogress": {"S": "INPROGRESS"}, + }, "Item": { - "expiration": stub.ANY, - "id": hashed_idempotency_key, - "status": "INPROGRESS", - "in_progress_expiration": stub.ANY, + "expiration": {"N": stub.ANY}, + "in_progress_expiration": {"N": stub.ANY}, + "id": {"S": hashed_idempotency_key}, + "status": {"S": "INPROGRESS"}, }, "TableName": "TEST_TABLE", } @@ -159,13 +163,17 @@ def expected_params_put_item_with_validation(hashed_idempotency_key, hashed_vali "#status": "status", "#in_progress_expiry": "in_progress_expiration", }, - "ExpressionAttributeValues": {":now": stub.ANY, ":now_in_millis": stub.ANY, ":inprogress": "INPROGRESS"}, + "ExpressionAttributeValues": { + ":now": {"N": stub.ANY}, + ":now_in_millis": {"N": stub.ANY}, + ":inprogress": {"S": "INPROGRESS"}, + }, "Item": { - "expiration": stub.ANY, - "in_progress_expiration": stub.ANY, - "id": hashed_idempotency_key, - "status": "INPROGRESS", - "validation": hashed_validation_key, + "expiration": {"N": stub.ANY}, + "in_progress_expiration": {"N": stub.ANY}, + "id": {"S": hashed_idempotency_key}, + "status": {"S": "INPROGRESS"}, + "validation": {"S": hashed_validation_key}, }, "TableName": "TEST_TABLE", } diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index e3131747e48..dfc6b03b60c 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -8,6 +8,7 @@ import jmespath import pytest from botocore import stub +from botocore.config import Config from pydantic import BaseModel from aws_lambda_powertools.utilities.data_classes import ( @@ -75,7 +76,7 @@ def test_idempotent_lambda_already_completed( Test idempotent decorator where event with matching event key has already been successfully processed """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = { "Item": { "id": {"S": hashed_idempotency_key}, @@ -87,7 +88,7 
@@ def test_idempotent_lambda_already_completed( expected_params = { "TableName": TABLE_NAME, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "ConsistentRead": True, } stubber.add_client_error("put_item", "ConditionalCheckFailedException") @@ -119,11 +120,11 @@ def test_idempotent_lambda_in_progress( Test idempotent decorator where lambda_handler is already processing an event with matching event key """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) expected_params = { "TableName": TABLE_NAME, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "ConsistentRead": True, } ddb_response = { @@ -171,11 +172,11 @@ def test_idempotent_lambda_in_progress_with_cache( """ save_to_cache_spy = mocker.spy(persistence_store, "_save_to_cache") retrieve_from_cache_spy = mocker.spy(persistence_store, "_retrieve_from_cache") - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) expected_params = { "TableName": TABLE_NAME, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "ConsistentRead": True, } ddb_response = { @@ -233,7 +234,7 @@ def test_idempotent_lambda_first_execution( Test idempotent decorator when lambda is executed with an event with a previously unknown event key """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} stubber.add_response("put_item", ddb_response, expected_params_put_item) @@ -268,7 +269,7 @@ def test_idempotent_lambda_first_execution_cached( """ save_to_cache_spy = mocker.spy(persistence_store, "_save_to_cache") retrieve_from_cache_spy = mocker.spy(persistence_store, "_retrieve_from_cache") - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} 
stubber.add_response("put_item", ddb_response, expected_params_put_item) @@ -309,7 +310,7 @@ def test_idempotent_lambda_first_execution_event_mutation( Ensures we're passing data by value, not reference. """ event = copy.deepcopy(lambda_apigw_event) - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} stubber.add_response( "put_item", @@ -349,7 +350,7 @@ def test_idempotent_lambda_expired( expiry window """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} @@ -384,10 +385,10 @@ def test_idempotent_lambda_exception( # Create a new provider # Stub the boto3 client - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} - expected_params_delete_item = {"TableName": TABLE_NAME, "Key": {"id": hashed_idempotency_key}} + expected_params_delete_item = {"TableName": TABLE_NAME, "Key": {"id": {"S": hashed_idempotency_key}}} stubber.add_response("put_item", ddb_response, expected_params_put_item) stubber.add_response("delete_item", ddb_response, expected_params_delete_item) @@ -426,7 +427,7 @@ def test_idempotent_lambda_already_completed_with_validation_bad_payload( Test idempotent decorator where event with matching event key has already been successfully processed """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = { "Item": { "id": {"S": hashed_idempotency_key}, @@ -437,7 +438,7 @@ def test_idempotent_lambda_already_completed_with_validation_bad_payload( } } - expected_params = {"TableName": TABLE_NAME, "Key": {"id": hashed_idempotency_key}, "ConsistentRead": True} + expected_params = {"TableName": TABLE_NAME, "Key": {"id": {"S": hashed_idempotency_key}}, "ConsistentRead": True} stubber.add_client_error("put_item", "ConditionalCheckFailedException") 
stubber.add_response("get_item", ddb_response, expected_params) @@ -470,7 +471,7 @@ def test_idempotent_lambda_expired_during_request( returns inconsistent/rapidly changing result between put_item and get_item calls. """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response_get_item = { "Item": { @@ -483,7 +484,7 @@ def test_idempotent_lambda_expired_during_request( ddb_response_get_item_missing = {} expected_params_get_item = { "TableName": TABLE_NAME, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "ConsistentRead": True, } @@ -523,7 +524,7 @@ def test_idempotent_persistence_exception_deleting( Test idempotent decorator when lambda is executed with an event with a previously unknown event key, but lambda_handler raises an exception which is retryable. """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} @@ -555,7 +556,7 @@ def test_idempotent_persistence_exception_updating( Test idempotent decorator when lambda is executed with an event with a previously unknown event key, but lambda_handler raises an exception which is retryable. """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} @@ -586,7 +587,7 @@ def test_idempotent_persistence_exception_getting( Test idempotent decorator when lambda is executed with an event with a previously unknown event key, but lambda_handler raises an exception which is retryable. 
""" - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) stubber.add_client_error("put_item", "ConditionalCheckFailedException") stubber.add_client_error("get_item", "UnexpectedException") @@ -624,7 +625,7 @@ def test_idempotent_lambda_first_execution_with_validation( """ Test idempotent decorator when lambda is executed with an event with a previously unknown event key """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = {} stubber.add_response("put_item", ddb_response, expected_params_put_item_with_validation) @@ -660,7 +661,7 @@ def test_idempotent_lambda_with_validator_util( validator utility to unwrap the event """ - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) ddb_response = { "Item": { "id": {"S": hashed_idempotency_key_with_envelope}, @@ -672,7 +673,7 @@ def test_idempotent_lambda_with_validator_util( expected_params = { "TableName": TABLE_NAME, - "Key": {"id": hashed_idempotency_key_with_envelope}, + "Key": {"id": {"S": hashed_idempotency_key_with_envelope}}, "ConsistentRead": True, } stubber.add_client_error("put_item", "ConditionalCheckFailedException") @@ -703,7 +704,7 @@ def test_idempotent_lambda_expires_in_progress_before_expire( hashed_idempotency_key, lambda_context, ): - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) stubber.add_client_error("put_item", "ConditionalCheckFailedException") @@ -713,7 +714,7 @@ def test_idempotent_lambda_expires_in_progress_before_expire( expected_params_get_item = { "TableName": TABLE_NAME, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "ConsistentRead": True, } ddb_response_get_item = { @@ -750,7 +751,7 @@ def test_idempotent_lambda_expires_in_progress_after_expire( hashed_idempotency_key, lambda_context, 
): - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) for _ in range(MAX_RETRIES + 1): stubber.add_client_error("put_item", "ConditionalCheckFailedException") @@ -758,7 +759,7 @@ def test_idempotent_lambda_expires_in_progress_after_expire( one_second_ago = datetime.datetime.now() - datetime.timedelta(seconds=1) expected_params_get_item = { "TableName": TABLE_NAME, - "Key": {"id": hashed_idempotency_key}, + "Key": {"id": {"S": hashed_idempotency_key}}, "ConsistentRead": True, } ddb_response_get_item = { @@ -1069,7 +1070,7 @@ def test_custom_jmespath_function_overrides_builtin_functions( def test_idempotent_lambda_save_inprogress_error(persistence_store: DynamoDBPersistenceLayer, lambda_context): # GIVEN a miss configured persistence layer # like no table was created for the idempotency persistence layer - stubber = stub.Stubber(persistence_store.table.meta.client) + stubber = stub.Stubber(persistence_store._client) service_error_code = "ResourceNotFoundException" service_message = "Custom message" @@ -1326,7 +1327,7 @@ def test_idempotency_disabled_envvar(monkeypatch, lambda_context, persistence_st # Scenario to validate no requests sent to dynamodb table when 'POWERTOOLS_IDEMPOTENCY_DISABLED' is set mock_event = {"data": "value"} - persistence_store.table = MagicMock() + persistence_store._client = MagicMock() monkeypatch.setenv("POWERTOOLS_IDEMPOTENCY_DISABLED", "1") @@ -1341,7 +1342,7 @@ def dummy_handler(event, context): dummy(data=mock_event) dummy_handler(mock_event, lambda_context) - assert len(persistence_store.table.method_calls) == 0 + assert len(persistence_store._client.method_calls) == 0 @pytest.mark.parametrize("idempotency_config", [{"use_local_cache": True}], indirect=True) @@ -1350,7 +1351,7 @@ def test_idempotent_function_duplicates( ): # Scenario to validate the both methods are called mock_event = {"data": "value"} - persistence_store.table = MagicMock() + persistence_store._client = 
MagicMock() @idempotent_function(data_keyword_argument="data", persistence_store=persistence_store, config=idempotency_config) def one(data): @@ -1362,16 +1363,14 @@ def two(data): assert one(data=mock_event) == "one" assert two(data=mock_event) == "two" - assert len(persistence_store.table.method_calls) == 4 + assert len(persistence_store._client.method_calls) == 4 def test_invalid_dynamodb_persistence_layer(): # Scenario constructing a DynamoDBPersistenceLayer with a key_attr matching sort_key_attr should fail with pytest.raises(ValueError) as ve: DynamoDBPersistenceLayer( - table_name="Foo", - key_attr="id", - sort_key_attr="id", + table_name="Foo", key_attr="id", sort_key_attr="id", boto_config=Config(region_name="eu-west-1") ) # and raise a ValueError assert str(ve.value) == "key_attr [id] and sort_key_attr [id] cannot be the same!" @@ -1476,7 +1475,7 @@ def test_idempotent_lambda_compound_already_completed( Test idempotent decorator having a DynamoDBPersistenceLayer with a compound key """ - stubber = stub.Stubber(persistence_store_compound.table.meta.client) + stubber = stub.Stubber(persistence_store_compound._client) stubber.add_client_error("put_item", "ConditionalCheckFailedException") ddb_response = { "Item": { @@ -1489,7 +1488,7 @@ def test_idempotent_lambda_compound_already_completed( } expected_params = { "TableName": TABLE_NAME, - "Key": {"id": "idempotency#", "sk": hashed_idempotency_key}, + "Key": {"id": {"S": "idempotency#"}, "sk": {"S": hashed_idempotency_key}}, "ConsistentRead": True, } stubber.add_response("get_item", ddb_response, expected_params) diff --git a/tests/functional/idempotency/utils.py b/tests/functional/idempotency/utils.py index f9cdaf05d0a..d12f1dbba1e 100644 --- a/tests/functional/idempotency/utils.py +++ b/tests/functional/idempotency/utils.py @@ -32,12 +32,16 @@ def build_idempotency_put_item_stub( "#status": "status", "#in_progress_expiry": "in_progress_expiration", }, - "ExpressionAttributeValues": {":now": stub.ANY, 
":now_in_millis": stub.ANY, ":inprogress": "INPROGRESS"}, + "ExpressionAttributeValues": { + ":now": {"N": stub.ANY}, + ":now_in_millis": {"N": stub.ANY}, + ":inprogress": {"S": "INPROGRESS"}, + }, "Item": { - "expiration": stub.ANY, - "id": idempotency_key_hash, - "status": "INPROGRESS", - "in_progress_expiration": stub.ANY, + "expiration": {"N": stub.ANY}, + "id": {"S": idempotency_key_hash}, + "status": {"S": "INPROGRESS"}, + "in_progress_expiration": {"N": stub.ANY}, }, "TableName": "TEST_TABLE", } @@ -62,11 +66,11 @@ def build_idempotency_update_item_stub( "#status": "status", }, "ExpressionAttributeValues": { - ":expiry": stub.ANY, - ":response_data": serialized_lambda_response, - ":status": "COMPLETED", + ":expiry": {"N": stub.ANY}, + ":response_data": {"S": serialized_lambda_response}, + ":status": {"S": "COMPLETED"}, }, - "Key": {"id": idempotency_key_hash}, + "Key": {"id": {"S": idempotency_key_hash}}, "TableName": "TEST_TABLE", "UpdateExpression": "SET #response_data = :response_data, " "#expiry = :expiry, #status = :status", } From 9be825a1641eb308c71b10184abbc8808d350628 Mon Sep 17 00:00:00 2001 From: Release bot Date: Thu, 9 Feb 2023 09:22:05 +0000 Subject: [PATCH 20/35] update changelog with latest changes --- CHANGELOG.md | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 191674dc63c..f0eaf6e97cf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,10 @@ # Unreleased +## Bug Fixes + +* **idempotency:** make idempotent_function decorator thread safe ([#1899](https://github.com/awslabs/aws-lambda-powertools-python/issues/1899)) + ## Documentation * **homepage:** set url for end-of-support in announce block ([#1893](https://github.com/awslabs/aws-lambda-powertools-python/issues/1893)) @@ -15,13 +19,18 @@ ## Maintenance -* **deps-dev:** bump mkdocs-material from 9.0.10 to 9.0.11 ([#1896](https://github.com/awslabs/aws-lambda-powertools-python/issues/1896)) +* **deps:** bump 
docker/setup-buildx-action from 2.4.0 to 2.4.1 ([#1903](https://github.com/awslabs/aws-lambda-powertools-python/issues/1903)) * **deps-dev:** bump mypy-boto3-appconfig from 1.26.0.post1 to 1.26.63 ([#1895](https://github.com/awslabs/aws-lambda-powertools-python/issues/1895)) +* **deps-dev:** bump types-requests from 2.28.11.8 to 2.28.11.12 ([#1906](https://github.com/awslabs/aws-lambda-powertools-python/issues/1906)) +* **deps-dev:** bump pytest-xdist from 3.1.0 to 3.2.0 ([#1905](https://github.com/awslabs/aws-lambda-powertools-python/issues/1905)) +* **deps-dev:** bump aws-cdk-lib from 2.63.0 to 2.63.2 ([#1904](https://github.com/awslabs/aws-lambda-powertools-python/issues/1904)) +* **deps-dev:** bump mkdocs-material from 9.0.10 to 9.0.11 ([#1896](https://github.com/awslabs/aws-lambda-powertools-python/issues/1896)) * **deps-dev:** bump mkdocs-material from 9.0.9 to 9.0.10 ([#1888](https://github.com/awslabs/aws-lambda-powertools-python/issues/1888)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.58 to 1.26.62 ([#1889](https://github.com/awslabs/aws-lambda-powertools-python/issues/1889)) * **deps-dev:** bump black from 22.12.0 to 23.1.0 ([#1886](https://github.com/awslabs/aws-lambda-powertools-python/issues/1886)) * **deps-dev:** bump aws-cdk-lib from 2.62.2 to 2.63.0 ([#1887](https://github.com/awslabs/aws-lambda-powertools-python/issues/1887)) * **maintainers:** fix release workflow rename +* **pypi:** add new links to Pypi package homepage ([#1912](https://github.com/awslabs/aws-lambda-powertools-python/issues/1912)) From 6f5c68636fb3e9d72e5bc8f1827004329a197755 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Feb 2023 21:03:51 +0000 Subject: [PATCH 21/35] chore(deps-dev): bump aws-cdk-lib from 2.63.2 to 2.64.0 (#1918) Bumps [aws-cdk-lib](https://github.com/aws/aws-cdk) from 2.63.2 to 2.64.0. 
- [Release notes](https://github.com/aws/aws-cdk/releases) - [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.md) - [Commits](https://github.com/aws/aws-cdk/compare/v2.63.2...v2.64.0) --- updated-dependencies: - dependency-name: aws-cdk-lib dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index fdb322a4e34..bcd3836b4db 100644 --- a/poetry.lock +++ b/poetry.lock @@ -131,14 +131,14 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.63.2" +version = "2.64.0" description = "Version 2 of the AWS Cloud Development Kit library" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.63.2.tar.gz", hash = "sha256:d84a352e817992727f0519240cf51c911fd4e30bc4d166ecb01552ac93eed795"}, - {file = "aws_cdk_lib-2.63.2-py3-none-any.whl", hash = "sha256:07e3151fbf962e6fe8c5e7c73701e810bc8fe927ab6391370510ee3a0665562f"}, + {file = "aws-cdk-lib-2.64.0.tar.gz", hash = "sha256:e21210765b362a0b8a7052fce7bd3a574ea5355a7e763e190c5051ee33b4868a"}, + {file = "aws_cdk_lib-2.64.0-py3-none-any.whl", hash = "sha256:e2de0a80eff201d2eb5326beffa5fb564231ae44eb08ed2b6d4da0c6a324e0e5"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "6e8d5f7600adef0c3be0c13c848449ca968f8261fd9aad42f5378c3e3cecbcda" +content-hash = "981a38a9601c53e65d1c0f13c11e9f1956eda18b8e5cbc580e9c5a9028505586" diff --git a/pyproject.toml b/pyproject.toml index 983e05e200f..6b2bd95710a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" retry = "^0.9.2" pytest-xdist = "^3.2.0" -aws-cdk-lib = 
"^2.63.2" +aws-cdk-lib = "^2.64.0" "aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-authorizers-alpha" = "^2.38.1-alpha.0" From 9517e3d311c7f6036b9e1749e880e0a2188c5701 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Feb 2023 21:22:49 +0000 Subject: [PATCH 22/35] chore(deps-dev): bump mkdocs-material from 9.0.11 to 9.0.12 (#1919) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index bcd3836b4db..fbc1aef492a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1463,14 +1463,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.0.11" +version = "9.0.12" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.0.11-py3-none-any.whl", hash = "sha256:90a1e1ed41e90de5d0ab97c874b7bf6af488d0faf4aaea8e5868e01f3f1ed923"}, - {file = "mkdocs_material-9.0.11.tar.gz", hash = "sha256:aff49e4ce622a107ed563b3a6a37dc3660a45a0e4d9e7d4d2c13ce9dc02a7faf"}, + {file = "mkdocs_material-9.0.12-py3-none-any.whl", hash = "sha256:ff4233e4f4da0c879db0dbcb532a690a3f86f5a66a0cfcce99a124e82a462afb"}, + {file = "mkdocs_material-9.0.12.tar.gz", hash = "sha256:4da07b1390c6b78844f1566d723dd045572e645f31b108a1b5062fa7d11aa241"}, ] [package.dependencies] @@ -2846,4 +2846,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "981a38a9601c53e65d1c0f13c11e9f1956eda18b8e5cbc580e9c5a9028505586" +content-hash = "ddd991646d99a0521be85e8210ba52d1d85bd645d6e0624f44168fff1887af6c" diff --git a/pyproject.toml b/pyproject.toml index 6b2bd95710a..04b0af0fd10 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,7 +81,7 @@ 
mypy-boto3-s3 = "^1.26.62" mypy-boto3-xray = "^1.26.11" types-requests = "^2.28.11" typing-extensions = "^4.4.0" -mkdocs-material = "^9.0.11" +mkdocs-material = "^9.0.12" filelock = "^3.9.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.0" From 32d11e0e341097f8cd7fa3dac16ce8b9835f328e Mon Sep 17 00:00:00 2001 From: Bakasura Date: Fri, 10 Feb 2023 06:43:35 -0500 Subject: [PATCH 23/35] feat(batch): add async_batch_processor for concurrent processing (#1724) Co-authored-by: Heitor Lessa Co-authored-by: h4x0r Co-authored-by: Heitor Lessa Co-authored-by: heitorlessa Co-authored-by: Leandro Damascena --- .../utilities/batch/__init__.py | 6 + aws_lambda_powertools/utilities/batch/base.py | 499 +++++++++++++----- docs/utilities/batch.md | 22 + .../getting_started_async_batch_processor.py | 25 + poetry.lock | 229 +++++--- pyproject.toml | 1 + tests/functional/test_utilities_batch.py | 94 +++- 7 files changed, 690 insertions(+), 186 deletions(-) create mode 100644 examples/batch_processing/src/getting_started_async_batch_processor.py diff --git a/aws_lambda_powertools/utilities/batch/__init__.py b/aws_lambda_powertools/utilities/batch/__init__.py index 08c35560b3f..02f3e786441 100644 --- a/aws_lambda_powertools/utilities/batch/__init__.py +++ b/aws_lambda_powertools/utilities/batch/__init__.py @@ -5,21 +5,27 @@ """ from aws_lambda_powertools.utilities.batch.base import ( + AsyncBatchProcessor, + BasePartialBatchProcessor, BasePartialProcessor, BatchProcessor, EventType, FailureResponse, SuccessResponse, + async_batch_processor, batch_processor, ) from aws_lambda_powertools.utilities.batch.exceptions import ExceptionInfo __all__ = ( "BatchProcessor", + "AsyncBatchProcessor", "BasePartialProcessor", + "BasePartialBatchProcessor", "ExceptionInfo", "EventType", "FailureResponse", "SuccessResponse", "batch_processor", + "async_batch_processor", ) diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py index 
4f9c4ca8780..171858c6d11 100644 --- a/aws_lambda_powertools/utilities/batch/base.py +++ b/aws_lambda_powertools/utilities/batch/base.py @@ -3,15 +3,29 @@ """ Batch processing utilities """ +import asyncio import copy import inspect import logging +import os import sys from abc import ABC, abstractmethod from enum import Enum -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union, overload +from typing import ( + Any, + Awaitable, + Callable, + Dict, + List, + Optional, + Tuple, + Type, + Union, + overload, +) from aws_lambda_powertools.middleware_factory import lambda_handler_decorator +from aws_lambda_powertools.shared import constants from aws_lambda_powertools.utilities.batch.exceptions import ( BatchProcessingError, ExceptionInfo, @@ -100,6 +114,49 @@ def process(self) -> List[Tuple]: """ return [self._process_record(record) for record in self.records] + @abstractmethod + async def _async_process_record(self, record: dict): + """ + Async process record with handler. + """ + raise NotImplementedError() + + def async_process(self) -> List[Tuple]: + """ + Async call instance's handler for each record. + + Note + ---- + + We keep the outer function synchronous to prevent making Lambda handler async, so to not impact + customers' existing middlewares. Instead, we create an async closure to handle asynchrony. + + We also handle edge cases like Lambda container thaw by getting an existing or creating an event loop. + + See: https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtime-environment.html#runtimes-lifecycle-shutdown + """ + + async def async_process_closure(): + return list(await asyncio.gather(*[self._async_process_record(record) for record in self.records])) + + # WARNING + # Do not use "asyncio.run(async_process())" due to Lambda container thaws/freeze, otherwise we might get "Event Loop is closed" # noqa: E501 + # Instead, get_event_loop() can also create one if a previous was erroneously closed + # Mangum library does this as well. 
It's battle tested with other popular async-only frameworks like FastAPI + # https://github.com/jordaneremieff/mangum/discussions/256#discussioncomment-2638946 + # https://github.com/jordaneremieff/mangum/blob/b85cd4a97f8ddd56094ccc540ca7156c76081745/mangum/protocols/http.py#L44 + + # Let's prime the coroutine and decide + # whether we create an event loop (Lambda) or schedule it as usual (non-Lambda) + coro = async_process_closure() + if os.getenv(constants.LAMBDA_TASK_ROOT_ENV): + loop = asyncio.get_event_loop() # NOTE: this might return an error starting in Python 3.12 in a few years + task_instance = loop.create_task(coro) + return loop.run_until_complete(task_instance) + + # Non-Lambda environment, run coroutine as usual + return asyncio.run(coro) + def __enter__(self): self._prepare() return self @@ -191,9 +248,262 @@ def failure_handler(self, record, exception: ExceptionInfo) -> FailureResponse: return entry +class BasePartialBatchProcessor(BasePartialProcessor): # noqa + DEFAULT_RESPONSE: Dict[str, List[Optional[dict]]] = {"batchItemFailures": []} + + def __init__(self, event_type: EventType, model: Optional["BatchTypeModels"] = None): + """Process batch and partially report failed items + + Parameters + ---------- + event_type: EventType + Whether this is a SQS, DynamoDB Streams, or Kinesis Data Stream event + model: Optional["BatchTypeModels"] + Parser's data model using either SqsRecordModel, DynamoDBStreamRecordModel, KinesisDataStreamRecord + + Exceptions + ---------- + BatchProcessingError + Raised when the entire batch has failed processing + """ + self.event_type = event_type + self.model = model + self.batch_response = copy.deepcopy(self.DEFAULT_RESPONSE) + self._COLLECTOR_MAPPING = { + EventType.SQS: self._collect_sqs_failures, + EventType.KinesisDataStreams: self._collect_kinesis_failures, + EventType.DynamoDBStreams: self._collect_dynamodb_failures, + } + self._DATA_CLASS_MAPPING = { + EventType.SQS: SQSRecord, + EventType.KinesisDataStreams: 
KinesisStreamRecord, + EventType.DynamoDBStreams: DynamoDBRecord, + } + + super().__init__() + + def response(self): + """Batch items that failed processing, if any""" + return self.batch_response + + def _prepare(self): + """ + Remove results from previous execution. + """ + self.success_messages.clear() + self.fail_messages.clear() + self.exceptions.clear() + self.batch_response = copy.deepcopy(self.DEFAULT_RESPONSE) + + def _clean(self): + """ + Report messages to be deleted in case of partial failure. + """ + + if not self._has_messages_to_report(): + return + + if self._entire_batch_failed(): + raise BatchProcessingError( + msg=f"All records failed processing. {len(self.exceptions)} individual errors logged " + f"separately below.", + child_exceptions=self.exceptions, + ) + + messages = self._get_messages_to_report() + self.batch_response = {"batchItemFailures": messages} + + def _has_messages_to_report(self) -> bool: + if self.fail_messages: + return True + + logger.debug(f"All {len(self.success_messages)} records successfully processed") + return False + + def _entire_batch_failed(self) -> bool: + return len(self.exceptions) == len(self.records) + + def _get_messages_to_report(self) -> List[Dict[str, str]]: + """ + Format messages to use in batch deletion + """ + return self._COLLECTOR_MAPPING[self.event_type]() + + # Event Source Data Classes follow python idioms for fields + # while Parser/Pydantic follows the event field names to the latter + def _collect_sqs_failures(self): + failures = [] + for msg in self.fail_messages: + msg_id = msg.messageId if self.model else msg.message_id + failures.append({"itemIdentifier": msg_id}) + return failures + + def _collect_kinesis_failures(self): + failures = [] + for msg in self.fail_messages: + msg_id = msg.kinesis.sequenceNumber if self.model else msg.kinesis.sequence_number + failures.append({"itemIdentifier": msg_id}) + return failures + + def _collect_dynamodb_failures(self): + failures = [] + for msg in 
self.fail_messages: + msg_id = msg.dynamodb.SequenceNumber if self.model else msg.dynamodb.sequence_number + failures.append({"itemIdentifier": msg_id}) + return failures + + @overload + def _to_batch_type(self, record: dict, event_type: EventType, model: "BatchTypeModels") -> "BatchTypeModels": + ... # pragma: no cover + + @overload + def _to_batch_type(self, record: dict, event_type: EventType) -> EventSourceDataClassTypes: + ... # pragma: no cover + + def _to_batch_type(self, record: dict, event_type: EventType, model: Optional["BatchTypeModels"] = None): + if model is not None: + return model.parse_obj(record) + return self._DATA_CLASS_MAPPING[event_type](record) + + +class BatchProcessor(BasePartialBatchProcessor): # Keep old name for compatibility + """Process native partial responses from SQS, Kinesis Data Streams, and DynamoDB. + + Example + ------- + + ## Process batch triggered by SQS + + ```python + import json + + from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord + from aws_lambda_powertools.utilities.typing import LambdaContext + + + processor = BatchProcessor(event_type=EventType.SQS) + tracer = Tracer() + logger = Logger() + + + @tracer.capture_method + def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + ... 
+ + @logger.inject_lambda_context + @tracer.capture_lambda_handler + @batch_processor(record_handler=record_handler, processor=processor) + def lambda_handler(event, context: LambdaContext): + return processor.response() + ``` + + ## Process batch triggered by Kinesis Data Streams + + ```python + import json + + from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord + from aws_lambda_powertools.utilities.typing import LambdaContext + + + processor = BatchProcessor(event_type=EventType.KinesisDataStreams) + tracer = Tracer() + logger = Logger() + + + @tracer.capture_method + def record_handler(record: KinesisStreamRecord): + logger.info(record.kinesis.data_as_text) + payload: dict = record.kinesis.data_as_json() + ... + + @logger.inject_lambda_context + @tracer.capture_lambda_handler + @batch_processor(record_handler=record_handler, processor=processor) + def lambda_handler(event, context: LambdaContext): + return processor.response() + ``` + + ## Process batch triggered by DynamoDB Data Streams + + ```python + import json + + from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord + from aws_lambda_powertools.utilities.typing import LambdaContext + + + processor = BatchProcessor(event_type=EventType.DynamoDBStreams) + tracer = Tracer() + logger = Logger() + + + @tracer.capture_method + def record_handler(record: DynamoDBRecord): + logger.info(record.dynamodb.new_image) + payload: dict = json.loads(record.dynamodb.new_image.get("item")) + # alternatively: + # changes: Dict[str, Any] = record.dynamodb.new_image # noqa: E800 + # payload = change.get("Message") -> "" + ... 
+ + @logger.inject_lambda_context + @tracer.capture_lambda_handler + def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, processor=processor): + processed_messages = processor.process() # kick off processing, return list[tuple] + + return processor.response() + ``` + + + Raises + ------ + BatchProcessingError + When all batch records fail processing + + Limitations + ----------- + * Async record handler not supported, use AsyncBatchProcessor instead. + """ + + async def _async_process_record(self, record: dict): + raise NotImplementedError() + + def _process_record(self, record: dict) -> Union[SuccessResponse, FailureResponse]: + """ + Process a record with instance's handler + + Parameters + ---------- + record: dict + A batch record to be processed. + """ + data = self._to_batch_type(record=record, event_type=self.event_type, model=self.model) + try: + if self._handler_accepts_lambda_context: + result = self.handler(record=data, lambda_context=self.lambda_context) + else: + result = self.handler(record=data) + + return self.success_handler(record=record, result=result) + except Exception: + return self.failure_handler(record=data, exception=sys.exc_info()) + + @lambda_handler_decorator def batch_processor( - handler: Callable, event: Dict, context: LambdaContext, record_handler: Callable, processor: BasePartialProcessor + handler: Callable, event: Dict, context: LambdaContext, record_handler: Callable, processor: BatchProcessor ): """ Middleware to handle batch event processing @@ -207,8 +517,8 @@ def batch_processor( context: LambdaContext Lambda's Context record_handler: Callable - Callable to process each record from the batch - processor: BasePartialProcessor + Callable or corutine to process each record from the batch + processor: BatchProcessor Batch Processor to handle partial failure cases Examples @@ -226,8 +536,7 @@ def batch_processor( Limitations ----------- - * Async batch processors - + * 
Async batch processors. Use `async_batch_processor` instead. """ records = event["Records"] @@ -237,9 +546,8 @@ def batch_processor( return handler(event, context) -class BatchProcessor(BasePartialProcessor): - """Process native partial responses from SQS, Kinesis Data Streams, and DynamoDB. - +class AsyncBatchProcessor(BasePartialBatchProcessor): + """Process native partial responses from SQS, Kinesis Data Streams, and DynamoDB asynchronously. Example ------- @@ -261,7 +569,7 @@ class BatchProcessor(BasePartialProcessor): @tracer.capture_method - def record_handler(record: SQSRecord): + async def record_handler(record: SQSRecord): payload: str = record.body if payload: item: dict = json.loads(payload) @@ -291,7 +599,7 @@ def lambda_handler(event, context: LambdaContext): @tracer.capture_method - def record_handler(record: KinesisStreamRecord): + async def record_handler(record: KinesisStreamRecord): logger.info(record.kinesis.data_as_text) payload: dict = record.kinesis.data_as_json() ... 
@@ -303,7 +611,6 @@ def lambda_handler(event, context: LambdaContext): return processor.response() ``` - ## Process batch triggered by DynamoDB Data Streams ```python @@ -321,7 +628,7 @@ def lambda_handler(event, context: LambdaContext): @tracer.capture_method - def record_handler(record: DynamoDBRecord): + async def record_handler(record: DynamoDBRecord): logger.info(record.dynamodb.new_image) payload: dict = json.loads(record.dynamodb.new_image.get("item")) # alternatively: @@ -344,55 +651,16 @@ def lambda_handler(event, context: LambdaContext): ------ BatchProcessingError When all batch records fail processing - """ - - DEFAULT_RESPONSE: Dict[str, List[Optional[dict]]] = {"batchItemFailures": []} - - def __init__(self, event_type: EventType, model: Optional["BatchTypeModels"] = None): - """Process batch and partially report failed items - - Parameters - ---------- - event_type: EventType - Whether this is a SQS, DynamoDB Streams, or Kinesis Data Stream event - model: Optional["BatchTypeModels"] - Parser's data model using either SqsRecordModel, DynamoDBStreamRecordModel, KinesisDataStreamRecord - - Exceptions - ---------- - BatchProcessingError - Raised when the entire batch has failed processing - """ - self.event_type = event_type - self.model = model - self.batch_response = copy.deepcopy(self.DEFAULT_RESPONSE) - self._COLLECTOR_MAPPING = { - EventType.SQS: self._collect_sqs_failures, - EventType.KinesisDataStreams: self._collect_kinesis_failures, - EventType.DynamoDBStreams: self._collect_dynamodb_failures, - } - self._DATA_CLASS_MAPPING = { - EventType.SQS: SQSRecord, - EventType.KinesisDataStreams: KinesisStreamRecord, - EventType.DynamoDBStreams: DynamoDBRecord, - } - - super().__init__() - def response(self): - """Batch items that failed processing, if any""" - return self.batch_response + Limitations + ----------- + * Sync record handler not supported, use BatchProcessor instead. + """ - def _prepare(self): - """ - Remove results from previous execution. 
- """ - self.success_messages.clear() - self.fail_messages.clear() - self.exceptions.clear() - self.batch_response = copy.deepcopy(self.DEFAULT_RESPONSE) + def _process_record(self, record: dict): + raise NotImplementedError() - def _process_record(self, record: dict) -> Union[SuccessResponse, FailureResponse]: + async def _async_process_record(self, record: dict) -> Union[SuccessResponse, FailureResponse]: """ Process a record with instance's handler @@ -404,80 +672,59 @@ def _process_record(self, record: dict) -> Union[SuccessResponse, FailureRespons data = self._to_batch_type(record=record, event_type=self.event_type, model=self.model) try: if self._handler_accepts_lambda_context: - result = self.handler(record=data, lambda_context=self.lambda_context) + result = await self.handler(record=data, lambda_context=self.lambda_context) else: - result = self.handler(record=data) + result = await self.handler(record=data) return self.success_handler(record=record, result=result) except Exception: return self.failure_handler(record=data, exception=sys.exc_info()) - def _clean(self): - """ - Report messages to be deleted in case of partial failure. - """ - - if not self._has_messages_to_report(): - return - - if self._entire_batch_failed(): - raise BatchProcessingError( - msg=f"All records failed processing. 
{len(self.exceptions)} individual errors logged " - f"separately below.", - child_exceptions=self.exceptions, - ) - - messages = self._get_messages_to_report() - self.batch_response = {"batchItemFailures": messages} - - def _has_messages_to_report(self) -> bool: - if self.fail_messages: - return True - - logger.debug(f"All {len(self.success_messages)} records successfully processed") - return False - - def _entire_batch_failed(self) -> bool: - return len(self.exceptions) == len(self.records) - - def _get_messages_to_report(self) -> List[Dict[str, str]]: - """ - Format messages to use in batch deletion - """ - return self._COLLECTOR_MAPPING[self.event_type]() - - # Event Source Data Classes follow python idioms for fields - # while Parser/Pydantic follows the event field names to the latter - def _collect_sqs_failures(self): - failures = [] - for msg in self.fail_messages: - msg_id = msg.messageId if self.model else msg.message_id - failures.append({"itemIdentifier": msg_id}) - return failures - - def _collect_kinesis_failures(self): - failures = [] - for msg in self.fail_messages: - msg_id = msg.kinesis.sequenceNumber if self.model else msg.kinesis.sequence_number - failures.append({"itemIdentifier": msg_id}) - return failures - def _collect_dynamodb_failures(self): - failures = [] - for msg in self.fail_messages: - msg_id = msg.dynamodb.SequenceNumber if self.model else msg.dynamodb.sequence_number - failures.append({"itemIdentifier": msg_id}) - return failures +@lambda_handler_decorator +def async_batch_processor( + handler: Callable, + event: Dict, + context: LambdaContext, + record_handler: Callable[..., Awaitable[Any]], + processor: AsyncBatchProcessor, +): + """ + Middleware to handle batch event processing + Parameters + ---------- + handler: Callable + Lambda's handler + event: Dict + Lambda's Event + context: LambdaContext + Lambda's Context + record_handler: Callable[..., Awaitable[Any]] + Callable to process each record from the batch + processor: 
AsyncBatchProcessor + Batch Processor to handle partial failure cases + Examples + -------- + **Processes Lambda's event with a BasePartialProcessor** + >>> from aws_lambda_powertools.utilities.batch import async_batch_processor, AsyncBatchProcessor + >>> + >>> async def async_record_handler(record): + >>> payload: str = record.body + >>> return payload + >>> + >>> processor = AsyncBatchProcessor(event_type=EventType.SQS) + >>> + >>> @async_batch_processor(record_handler=async_record_handler, processor=processor) + >>> async def lambda_handler(event, context: LambdaContext): + >>> return processor.response() - @overload - def _to_batch_type(self, record: dict, event_type: EventType, model: "BatchTypeModels") -> "BatchTypeModels": - ... # pragma: no cover + Limitations + ----------- + * Sync batch processors. Use `batch_processor` instead. + """ + records = event["Records"] - @overload - def _to_batch_type(self, record: dict, event_type: EventType) -> EventSourceDataClassTypes: - ... # pragma: no cover + with processor(records, record_handler, lambda_context=context): + processor.async_process() - def _to_batch_type(self, record: dict, event_type: EventType, model: Optional["BatchTypeModels"] = None): - if model is not None: - return model.parse_obj(record) - return self._DATA_CLASS_MAPPING[event_type](record) + return handler(event, context) diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md index 988b1937b5b..4a53e053f44 100644 --- a/docs/utilities/batch.md +++ b/docs/utilities/batch.md @@ -636,6 +636,28 @@ All records in the batch will be passed to this handler for processing, even if All processing logic will and should be performed by the `record_handler` function. +### Processing messages asynchronously + +!!! tip "New to AsyncIO? Read this [comprehensive guide first](https://realpython.com/async-io-python/){target="_blank"}." + +You can use `AsyncBatchProcessor` class and `async_batch_processor` decorator to process messages concurrently. 
+ +???+ question "When is this useful?" + Your use case might be able to process multiple records at the same time without conflicting with one another. + + For example, imagine you need to process multiple loyalty points and incrementally save in a database. While you await the database to confirm your records are saved, you could start processing another request concurrently. + + The reason this is not the default behaviour is that not all use cases can handle concurrency safely (e.g., loyalty points must be updated in order). + +```python hl_lines="4 6 11 14 23" title="High-concurrency with AsyncBatchProcessor" +--8<-- "examples/batch_processing/src/getting_started_async_batch_processor.py" +``` + +???+ warning "Using tracer?" + `AsyncBatchProcessor` uses `asyncio.gather` which can cause side effects and reach trace limits at high concurrency. + + See [Tracing concurrent asynchronous functions](../core/tracer.md#concurrent-asynchronous-functions). + ## Advanced ### Pydantic integration diff --git a/examples/batch_processing/src/getting_started_async_batch_processor.py b/examples/batch_processing/src/getting_started_async_batch_processor.py new file mode 100644 index 00000000000..594be0540f3 --- /dev/null +++ b/examples/batch_processing/src/getting_started_async_batch_processor.py @@ -0,0 +1,25 @@ +import httpx # external dependency + +from aws_lambda_powertools.utilities.batch import ( + AsyncBatchProcessor, + EventType, + async_batch_processor, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = AsyncBatchProcessor(event_type=EventType.SQS) + + +async def async_record_handler(record: SQSRecord): + # Yield control back to the event loop to schedule other tasks + # while you await from a response from httpbin.org + async with httpx.AsyncClient() as client: + ret = await client.get("https://httpbin.org/get") + + return ret.status_code + + 
+@async_batch_processor(record_handler=async_record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/poetry.lock b/poetry.lock index fbc1aef492a..49a3b94e304 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,27 @@ # This file is automatically @generated by Poetry and should not be changed by hand. +[[package]] +name = "anyio" +version = "3.6.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16,<0.22)"] + [[package]] name = "attrs" version = "22.2.0" @@ -21,18 +43,18 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy [[package]] name = "aws-cdk-asset-awscli-v1" -version = "2.2.52" +version = "2.2.63" description = "A library that contains the AWS CLI for use in Lambda Layers" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.asset-awscli-v1-2.2.52.tar.gz", hash = "sha256:ab04beec8e267e363931df2caf48a24100cb5799d7fd8db51efe881d117efa7a"}, - {file = "aws_cdk.asset_awscli_v1-2.2.52-py3-none-any.whl", hash = "sha256:6e9d686bb0b00242e869e91d57b65b619ffb42e99abe482436e3a6692485dbfe"}, + {file = 
"aws-cdk.asset-awscli-v1-2.2.63.tar.gz", hash = "sha256:76154ade5391f8927c932b609028b28426af34215f144d07576ba35e4eca9442"}, + {file = "aws_cdk.asset_awscli_v1-2.2.63-py3-none-any.whl", hash = "sha256:1ad1d5b7287097f6546902801a40f39b6580c99e5d0eb07dfc5e8ddf428167b0"}, ] [package.dependencies] -jsii = ">=1.73.0,<2.0.0" +jsii = ">=1.74.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -55,77 +77,77 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-asset-node-proxy-agent-v5" -version = "2.0.42" +version = "2.0.52" description = "@aws-cdk/asset-node-proxy-agent-v5" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.asset-node-proxy-agent-v5-2.0.42.tar.gz", hash = "sha256:ae1b615be42e78681e05b145460603f171c06b671a2d1caa060a159b94b06366"}, - {file = "aws_cdk.asset_node_proxy_agent_v5-2.0.42-py3-none-any.whl", hash = "sha256:6e0174802097d558daa1be5c4e6e7f309eeba626392955e596bf967ee37865d3"}, + {file = "aws-cdk.asset-node-proxy-agent-v5-2.0.52.tar.gz", hash = "sha256:1346ce52303e8b8c7c88ce16599a36d947e9546fc6cae0965182594d7b0e600d"}, + {file = "aws_cdk.asset_node_proxy_agent_v5-2.0.52-py3-none-any.whl", hash = "sha256:1a08b261ea2bf10f07fe89a7502686e6be2adea636e6bb1ee1f56b678231fe02"}, ] [package.dependencies] -jsii = ">=1.73.0,<2.0.0" +jsii = ">=1.74.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.62.2a0" +version = "2.64.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.62.2a0.tar.gz", hash = "sha256:63c191fdcb8b20d1afd34af84ae465740b14009a06af7bdc8e78475614f85a23"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.62.2a0-py3-none-any.whl", hash = "sha256:32ff5d8745b71ef30ba009de4d8d9f12bd34a4f3c940500ba34367211f05c9f4"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.64.0a0.tar.gz", hash = 
"sha256:7e33fb04b10c1668abe334e25a998967b51aeed76243fc591b66705c8d6241d4"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.64.0a0-py3-none-any.whl", hash = "sha256:88f72a435fc91f7c02a8f1fb564958ac1c8125c5319021d61b67d00466185199"}, ] [package.dependencies] -aws-cdk-lib = ">=2.62.2,<3.0.0" +aws-cdk-lib = ">=2.64.0,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.73.0,<2.0.0" +jsii = ">=1.74.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.62.2a0" +version = "2.64.0a0" description = "Authorizers for AWS APIGateway V2" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.62.2a0.tar.gz", hash = "sha256:9a4ba121c49e4ba866b985495b87e9ecaec50c1f26e0d8cb116e15492196c042"}, - {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.62.2a0-py3-none-any.whl", hash = "sha256:9cfb1495b618880b395d6ecbd45c3c524c67013f2567eae6e19e6f06586b9a38"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.64.0a0.tar.gz", hash = "sha256:670ee77f19818723aeeea47fbac1441d58f39b5eff79332e15196452ec6183bf"}, + {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.64.0a0-py3-none-any.whl", hash = "sha256:e2377441ad33aa43453f5c501e00a9a0c261627e78b2080617edd6e09949c139"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.62.2.a0" -aws-cdk-lib = ">=2.62.2,<3.0.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.64.0.a0" +aws-cdk-lib = ">=2.64.0,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.73.0,<2.0.0" +jsii = ">=1.74.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.62.2a0" +version = "2.64.0a0" description = "Integrations for AWS APIGateway V2" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.62.2a0.tar.gz", hash = 
"sha256:4ae06b6585664c659eb6b88ff70eaa628a96ffb4728ab0d0eb7ff1f23913565b"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.62.2a0-py3-none-any.whl", hash = "sha256:497e93d193895b1b38545d5ca152e31f575b971ce371ad655aeb3bbed7fc6052"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.64.0a0.tar.gz", hash = "sha256:1826fa641a0e849cff90e681033066fa3fea44bca447c6696681dddf862df364"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.64.0a0-py3-none-any.whl", hash = "sha256:a34f87cafbbdf76078ce564642f7f11771f4693a04bb7f41eca7d76b26ffe562"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.62.2.a0" -aws-cdk-lib = ">=2.62.2,<3.0.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.64.0.a0" +aws-cdk-lib = ">=2.64.0,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.73.0,<2.0.0" +jsii = ">=1.74.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -167,21 +189,20 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.58.1" +version = "1.59.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.58.1.tar.gz", hash = "sha256:cd60a19085d432bc00769b597bc2e6854f546ff9928f8067fc5fbcb5a1ed74ff"}, - {file = "aws_sam_translator-1.58.1-py2-none-any.whl", hash = "sha256:c4e261e450d574572d389edcafab04d1fe337615f867610410390c2435cb1f26"}, - {file = "aws_sam_translator-1.58.1-py3-none-any.whl", hash = "sha256:ca47d6eb04d8cf358bea9160411193da40a80dc3e79bb0c5bace0c21f0e4c888"}, + {file = "aws-sam-translator-1.59.0.tar.gz", hash = "sha256:9b8f23a5754cba92677d334ece5c5d9dc9b1f1a327a650fc8939ae3fc6da4141"}, + {file = "aws_sam_translator-1.59.0-py3-none-any.whl", hash = "sha256:6761293a21bd1cb0e19f168926ebfc4a3a6c9011aca67bd448ef485a55d6f658"}, ] [package.dependencies] boto3 = ">=1.19.5,<2.0.0" jsonschema = ">=3.2,<5" -pydantic = ">=1.10.2,<1.11.0" +pydantic 
= ">=1.8,<2.0" typing-extensions = ">=4.4.0,<4.5.0" [package.extras] @@ -279,18 +300,18 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.26.60" +version = "1.26.68" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.26.60-py3-none-any.whl", hash = "sha256:5fd2810217a74a38078a19fb85a9e5d6934d0c146eb060967a3ffd7ab33cdf00"}, - {file = "boto3-1.26.60.tar.gz", hash = "sha256:f0824b3bcf803800d3ecef903b4840427e4b3d37a069f6fc9a86310f7e036ad5"}, + {file = "boto3-1.26.68-py3-none-any.whl", hash = "sha256:bbb426a9b3afd3ccbac25e03b215d79e90b4c47905b1b08b3b9d86fc74096974"}, + {file = "boto3-1.26.68.tar.gz", hash = "sha256:c92dd0fde7839c0ca9c16a989d67ceb7f80f53de19f2b087fd1182f2af41b2ae"}, ] [package.dependencies] -botocore = ">=1.29.60,<1.30.0" +botocore = ">=1.29.68,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -299,14 +320,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.60" +version = "1.29.68" description = "Low-level, data-driven core of boto 3." 
category = "main" optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.29.60-py3-none-any.whl", hash = "sha256:c4ae251e7df0cf01d893eb945bc8f23c14989ed349775a8e16c949f08a068f9a"}, - {file = "botocore-1.29.60.tar.gz", hash = "sha256:a21217ccf4613c9ebbe4c3192e13ba91d46be642560e39a16406662a398a107b"}, + {file = "botocore-1.29.68-py3-none-any.whl", hash = "sha256:08fa8302a22553e69b70b1de2cc8cec61a3a878546658d091473e13d5b9d2ca4"}, + {file = "botocore-1.29.68.tar.gz", hash = "sha256:8f5cb96dc0862809d29fe512087c77c15fe6328a2d8238f0a96cccb6eb77ec12"}, ] [package.dependencies] @@ -315,7 +336,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.15.3)"] +crt = ["awscrt (==0.16.9)"] [[package]] name = "cattrs" @@ -508,14 +529,14 @@ files = [ [[package]] name = "constructs" -version = "10.1.236" +version = "10.1.246" description = "A programming model for software-defined state" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "constructs-10.1.236-py3-none-any.whl", hash = "sha256:e51d8fac38b12a88359d5d2bedb535987eaa54e68631add29726652be66490e9"}, - {file = "constructs-10.1.236.tar.gz", hash = "sha256:10b3c5ed3d4c6fd930bd8f59c8a5926028dafe8a5bf703fba5bcc53c89fce002"}, + {file = "constructs-10.1.246-py3-none-any.whl", hash = "sha256:f07c7c4aa2d22ff960a9f51f7011030b4a3d8cc6df0e0a84e30ea63c2c8c8456"}, + {file = "constructs-10.1.246.tar.gz", hash = "sha256:26d0b017eef92bde3ece7454b524dddc051425819c59932ebe3c1ff6f9e1cb4a"}, ] [package.dependencies] @@ -901,6 +922,67 @@ files = [ gitdb = ">=4.0.1,<5" typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + [[package]] name = "hvac" version = "1.0.2" @@ -1233,7 +1315,6 @@ category = "dev" optional = false python-versions = "*" files = [ - {file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"}, {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, ] @@ -1708,14 +1789,14 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] @@ -1803,22 +1884,22 @@ files = [ [[package]] name = "platformdirs" -version = "2.6.2" +version = "3.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, - {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, + {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, + {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, ] [package.dependencies] typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", 
"pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -2469,6 +2550,24 @@ files = [ decorator = ">=3.4.2" py = ">=1.4.26,<2.0.0" +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + [[package]] name = "s3transfer" version = "0.6.0" @@ -2527,6 +2626,18 @@ files = [ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "stevedore" version = "3.5.2" @@ -2634,14 +2745,14 @@ types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.25.4" +version = "1.26.25.5" description = "Typing stubs for urllib3" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, - {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, + {file = "types-urllib3-1.26.25.5.tar.gz", hash = 
"sha256:5630e578246d170d91ebe3901788cd28d53c4e044dc2e2488e3b0d55fb6895d8"}, + {file = "types_urllib3-1.26.25.5-py3-none-any.whl", hash = "sha256:e8f25c8bb85cde658c72ee931e56e7abd28803c26032441eea9ff4a4df2b0c31"}, ] [[package]] @@ -2822,14 +2933,14 @@ requests = ">=2.0,<3.0" [[package]] name = "zipp" -version = "3.12.0" +version = "3.13.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.12.0-py3-none-any.whl", hash = "sha256:9eb0a4c5feab9b08871db0d672745b53450d7f26992fd1e4653aa43345e97b86"}, - {file = "zipp-3.12.0.tar.gz", hash = "sha256:73efd63936398aac78fd92b6f4865190119d6c91b531532e798977ea8dd402eb"}, + {file = "zipp-3.13.0-py3-none-any.whl", hash = "sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"}, + {file = "zipp-3.13.0.tar.gz", hash = "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6"}, ] [package.extras] @@ -2846,4 +2957,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "ddd991646d99a0521be85e8210ba52d1d85bd645d6e0624f44168fff1887af6c" +content-hash = "62a6b0896bad16de0b814e025384cc7c078c72cead1e5c4926700c118d8b7dda" diff --git a/pyproject.toml b/pyproject.toml index 04b0af0fd10..e8fdc91ca0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,6 +103,7 @@ aws-sdk = ["boto3"] cfn-lint = "0.67.0" mypy = "^0.982" types-python-dateutil = "^2.8.19.6" +httpx = "^0.23.3" [tool.coverage.run] source = ["aws_lambda_powertools"] diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py index 1d50de9e85e..6dcfc3d179d 100644 --- a/tests/functional/test_utilities_batch.py +++ b/tests/functional/test_utilities_batch.py @@ -1,13 +1,15 @@ import json from random import randint -from typing import Callable, Dict, Optional +from typing import Any, Awaitable, Callable, Dict, Optional import pytest from 
botocore.config import Config from aws_lambda_powertools.utilities.batch import ( + AsyncBatchProcessor, BatchProcessor, EventType, + async_batch_processor, batch_processor, ) from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError @@ -115,6 +117,17 @@ def handler(record): return handler +@pytest.fixture(scope="module") +def async_record_handler() -> Callable[..., Awaitable[Any]]: + async def handler(record): + body = record["body"] + if "fail" in body: + raise Exception("Failed to process record.") + return body + + return handler + + @pytest.fixture(scope="module") def kinesis_record_handler() -> Callable: def handler(record: KinesisStreamRecord): @@ -639,3 +652,82 @@ def lambda_handler(event, context): # THEN raise BatchProcessingError assert "All records failed processing. " in str(e.value) + + +def test_async_batch_processor_middleware_success_only(sqs_event_factory, async_record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("success")) + second_record = SQSRecord(sqs_event_factory("success")) + event = {"Records": [first_record.raw_event, second_record.raw_event]} + + processor = AsyncBatchProcessor(event_type=EventType.SQS) + + @async_batch_processor(record_handler=async_record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + + # WHEN + result = lambda_handler(event, {}) + + # THEN + assert result["batchItemFailures"] == [] + + +def test_async_batch_processor_middleware_with_failure(sqs_event_factory, async_record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("fail")) + second_record = SQSRecord(sqs_event_factory("success")) + third_record = SQSRecord(sqs_event_factory("fail")) + event = {"Records": [first_record.raw_event, second_record.raw_event, third_record.raw_event]} + + processor = AsyncBatchProcessor(event_type=EventType.SQS) + + @async_batch_processor(record_handler=async_record_handler, processor=processor) + def lambda_handler(event, 
context): + return processor.response() + + # WHEN + result = lambda_handler(event, {}) + + # THEN + assert len(result["batchItemFailures"]) == 2 + + +def test_async_batch_processor_context_success_only(sqs_event_factory, async_record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("success")) + second_record = SQSRecord(sqs_event_factory("success")) + records = [first_record.raw_event, second_record.raw_event] + processor = AsyncBatchProcessor(event_type=EventType.SQS) + + # WHEN + with processor(records, async_record_handler) as batch: + processed_messages = batch.async_process() + + # THEN + assert processed_messages == [ + ("success", first_record.body, first_record.raw_event), + ("success", second_record.body, second_record.raw_event), + ] + + assert batch.response() == {"batchItemFailures": []} + + +def test_async_batch_processor_context_with_failure(sqs_event_factory, async_record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("failure")) + second_record = SQSRecord(sqs_event_factory("success")) + third_record = SQSRecord(sqs_event_factory("fail")) + records = [first_record.raw_event, second_record.raw_event, third_record.raw_event] + processor = AsyncBatchProcessor(event_type=EventType.SQS) + + # WHEN + with processor(records, async_record_handler) as batch: + processed_messages = batch.async_process() + + # THEN + assert processed_messages[1] == ("success", second_record.body, second_record.raw_event) + assert len(batch.fail_messages) == 2 + assert batch.response() == { + "batchItemFailures": [{"itemIdentifier": first_record.message_id}, {"itemIdentifier": third_record.message_id}] + } From 8689708503176942460b93942631f8b2d72d3093 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 10 Feb 2023 11:44:02 +0000 Subject: [PATCH 24/35] update changelog with latest changes --- CHANGELOG.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0eaf6e97cf..c7be91807be 
100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,19 +15,22 @@ ## Features +* **batch:** add async_batch_processor for concurrent processing ([#1724](https://github.com/awslabs/aws-lambda-powertools-python/issues/1724)) * **metrics:** add default_dimensions to single_metric ([#1880](https://github.com/awslabs/aws-lambda-powertools-python/issues/1880)) ## Maintenance * **deps:** bump docker/setup-buildx-action from 2.4.0 to 2.4.1 ([#1903](https://github.com/awslabs/aws-lambda-powertools-python/issues/1903)) -* **deps-dev:** bump mypy-boto3-appconfig from 1.26.0.post1 to 1.26.63 ([#1895](https://github.com/awslabs/aws-lambda-powertools-python/issues/1895)) +* **deps-dev:** bump mkdocs-material from 9.0.11 to 9.0.12 ([#1919](https://github.com/awslabs/aws-lambda-powertools-python/issues/1919)) +* **deps-dev:** bump black from 22.12.0 to 23.1.0 ([#1886](https://github.com/awslabs/aws-lambda-powertools-python/issues/1886)) * **deps-dev:** bump types-requests from 2.28.11.8 to 2.28.11.12 ([#1906](https://github.com/awslabs/aws-lambda-powertools-python/issues/1906)) * **deps-dev:** bump pytest-xdist from 3.1.0 to 3.2.0 ([#1905](https://github.com/awslabs/aws-lambda-powertools-python/issues/1905)) +* **deps-dev:** bump aws-cdk-lib from 2.63.2 to 2.64.0 ([#1918](https://github.com/awslabs/aws-lambda-powertools-python/issues/1918)) * **deps-dev:** bump aws-cdk-lib from 2.63.0 to 2.63.2 ([#1904](https://github.com/awslabs/aws-lambda-powertools-python/issues/1904)) * **deps-dev:** bump mkdocs-material from 9.0.10 to 9.0.11 ([#1896](https://github.com/awslabs/aws-lambda-powertools-python/issues/1896)) -* **deps-dev:** bump mkdocs-material from 9.0.9 to 9.0.10 ([#1888](https://github.com/awslabs/aws-lambda-powertools-python/issues/1888)) +* **deps-dev:** bump mypy-boto3-appconfig from 1.26.0.post1 to 1.26.63 ([#1895](https://github.com/awslabs/aws-lambda-powertools-python/issues/1895)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.58 to 1.26.62 
([#1889](https://github.com/awslabs/aws-lambda-powertools-python/issues/1889)) -* **deps-dev:** bump black from 22.12.0 to 23.1.0 ([#1886](https://github.com/awslabs/aws-lambda-powertools-python/issues/1886)) +* **deps-dev:** bump mkdocs-material from 9.0.9 to 9.0.10 ([#1888](https://github.com/awslabs/aws-lambda-powertools-python/issues/1888)) * **deps-dev:** bump aws-cdk-lib from 2.62.2 to 2.63.0 ([#1887](https://github.com/awslabs/aws-lambda-powertools-python/issues/1887)) * **maintainers:** fix release workflow rename * **pypi:** add new links to Pypi package homepage ([#1912](https://github.com/awslabs/aws-lambda-powertools-python/issues/1912)) From a23f0bfecd0f97debe3fceb994cc3fd72ce061b5 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 10 Feb 2023 15:07:46 +0000 Subject: [PATCH 25/35] feat(metrics) - add support for high resolution metrics (#1915) Co-authored-by: heitorlessa --- aws_lambda_powertools/metrics/__init__.py | 11 +- aws_lambda_powertools/metrics/base.py | 118 +++++++++++++++--- aws_lambda_powertools/metrics/exceptions.py | 6 + aws_lambda_powertools/metrics/metrics.py | 4 +- aws_lambda_powertools/metrics/types.py | 7 ++ docs/core/metrics.md | 19 +++ .../metrics/src/add_high_resolution_metric.py | 10 ++ tests/functional/test_metrics.py | 97 +++++++++++++- 8 files changed, 252 insertions(+), 20 deletions(-) create mode 100644 aws_lambda_powertools/metrics/types.py create mode 100644 examples/metrics/src/add_high_resolution_metric.py diff --git a/aws_lambda_powertools/metrics/__init__.py b/aws_lambda_powertools/metrics/__init__.py index 3315899da0b..5f30f14102d 100644 --- a/aws_lambda_powertools/metrics/__init__.py +++ b/aws_lambda_powertools/metrics/__init__.py @@ -1,7 +1,12 @@ """CloudWatch Embedded Metric Format utility """ -from .base import MetricUnit -from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError +from .base import MetricResolution, MetricUnit +from .exceptions import ( + MetricResolutionError, + 
MetricUnitError, + MetricValueError, + SchemaValidationError, +) from .metric import single_metric from .metrics import EphemeralMetrics, Metrics @@ -11,6 +16,8 @@ "single_metric", "MetricUnit", "MetricUnitError", + "MetricResolution", + "MetricResolutionError", "SchemaValidationError", "MetricValueError", ] diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index c2949ab43da..67dcb47c282 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -12,7 +12,13 @@ from ..shared import constants from ..shared.functions import resolve_env_var_choice -from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError +from .exceptions import ( + MetricResolutionError, + MetricUnitError, + MetricValueError, + SchemaValidationError, +) +from .types import MetricNameUnitResolution logger = logging.getLogger(__name__) @@ -22,6 +28,11 @@ is_cold_start = True +class MetricResolution(Enum): + Standard = 60 + High = 1 + + class MetricUnit(Enum): Seconds = "Seconds" Microseconds = "Microseconds" @@ -72,7 +83,9 @@ class MetricManager: Raises ------ MetricUnitError - When metric metric isn't supported by CloudWatch + When metric unit isn't supported by CloudWatch + MetricResolutionError + When metric resolution isn't supported by CloudWatch MetricValueError When metric value isn't a number SchemaValidationError @@ -93,9 +106,16 @@ def __init__( self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV)) self.metadata_set = metadata_set if metadata_set is not None else {} self._metric_units = [unit.value for unit in MetricUnit] - self._metric_unit_options = list(MetricUnit.__members__) + self._metric_unit_valid_options = list(MetricUnit.__members__) + self._metric_resolutions = [resolution.value for resolution in MetricResolution] - def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> None: + def add_metric( + self, + name: str, 
+ unit: Union[MetricUnit, str], + value: float, + resolution: Union[MetricResolution, int] = 60, + ) -> None: """Adds given metric Example @@ -108,6 +128,10 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> N metric.add_metric(name="BookingConfirmation", unit="Count", value=1) + **Add given metric with MetricResolution non default value** + + metric.add_metric(name="BookingConfirmation", unit="Count", value=1, resolution=MetricResolution.High) + Parameters ---------- name : str @@ -116,18 +140,24 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> N `aws_lambda_powertools.helper.models.MetricUnit` value : float Metric value + resolution : Union[MetricResolution, int] + `aws_lambda_powertools.helper.models.MetricResolution` Raises ------ MetricUnitError When metric unit is not supported by CloudWatch + MetricResolutionError + When metric resolution is not supported by CloudWatch """ if not isinstance(value, numbers.Number): raise MetricValueError(f"{value} is not a valid number") unit = self._extract_metric_unit_value(unit=unit) + resolution = self._extract_metric_resolution_value(resolution=resolution) metric: Dict = self.metric_set.get(name, defaultdict(list)) metric["Unit"] = unit + metric["StorageResolution"] = resolution metric["Value"].append(float(value)) logger.debug(f"Adding metric: {name} with {metric}") self.metric_set[name] = metric @@ -194,15 +224,28 @@ def serialize_metric_set( logger.debug({"details": "Serializing metrics", "metrics": metrics, "dimensions": dimensions}) - metric_names_and_units: List[Dict[str, str]] = [] # [ { "Name": "metric_name", "Unit": "Count" } ] + # For standard resolution metrics, don't add StorageResolution field to avoid unnecessary ingestion of data into cloudwatch # noqa E501 + # Example: [ { "Name": "metric_name", "Unit": "Count"} ] # noqa E800 + # + # In case using high-resolution metrics, add StorageResolution field + # Example: [ { "Name": "metric_name", 
"Unit": "Count", "StorageResolution": 1 } ] # noqa E800 + metric_definition: List[MetricNameUnitResolution] = [] metric_names_and_values: Dict[str, float] = {} # { "metric_name": 1.0 } for metric_name in metrics: metric: dict = metrics[metric_name] metric_value: int = metric.get("Value", 0) metric_unit: str = metric.get("Unit", "") + metric_resolution: int = metric.get("StorageResolution", 60) + + metric_definition_data: MetricNameUnitResolution = {"Name": metric_name, "Unit": metric_unit} + + # high-resolution metrics + if metric_resolution == 1: + metric_definition_data["StorageResolution"] = metric_resolution + + metric_definition.append(metric_definition_data) - metric_names_and_units.append({"Name": metric_name, "Unit": metric_unit}) metric_names_and_values.update({metric_name: metric_value}) return { @@ -212,7 +255,7 @@ { "Namespace": self.namespace, # "test_namespace" "Dimensions": [list(dimensions.keys())], # [ "service" ] - "Metrics": metric_names_and_units, + "Metrics": metric_definition, } ], }, @@ -358,6 +401,34 @@ def decorate(event, context): return decorate + def _extract_metric_resolution_value(self, resolution: Union[int, MetricResolution]) -> int: + """Return metric resolution value whether that's int or MetricResolution enum + + Parameters + ---------- + resolution : Union[int, MetricResolution] + Metric resolution + + Returns + ------- + int + Metric resolution value must be 1 or 60 + + Raises + ------ + MetricResolutionError + When metric resolution is not supported by CloudWatch + """ + if isinstance(resolution, MetricResolution): + return resolution.value + + if isinstance(resolution, int) and resolution in self._metric_resolutions: + return resolution + + raise MetricResolutionError( + f"Invalid metric resolution '{resolution}', expected either option: {self._metric_resolutions}" # noqa: E501 + ) + def _extract_metric_unit_value(self, unit: Union[str, MetricUnit]) -> str: """Return metric value from metric unit 
whether that's str or MetricUnit enum @@ -378,12 +449,12 @@ def _extract_metric_unit_value(self, unit: Union[str, MetricUnit]) -> str: """ if isinstance(unit, str): - if unit in self._metric_unit_options: + if unit in self._metric_unit_valid_options: unit = MetricUnit[unit].value if unit not in self._metric_units: raise MetricUnitError( - f"Invalid metric unit '{unit}', expected either option: {self._metric_unit_options}" + f"Invalid metric unit '{unit}', expected either option: {self._metric_unit_valid_options}" ) if isinstance(unit, MetricUnit): @@ -429,10 +500,10 @@ class SingleMetric(MetricManager): **Creates cold start metric with function_version as dimension** import json - from aws_lambda_powertools.metrics import single_metric, MetricUnit + from aws_lambda_powertools.metrics import single_metric, MetricUnit, MetricResolution metric = single_metric(namespace="ServerlessAirline") - metric.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) + metric.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1, resolution=MetricResolution.Standard) metric.add_dimension(name="function_version", value=47) print(json.dumps(metric.serialize_metric_set(), indent=4)) @@ -443,7 +514,13 @@ class SingleMetric(MetricManager): Inherits from `aws_lambda_powertools.metrics.base.MetricManager` """ - def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> None: + def add_metric( + self, + name: str, + unit: Union[MetricUnit, str], + value: float, + resolution: Union[MetricResolution, int] = 60, + ) -> None: """Method to prevent more than one metric being created Parameters @@ -454,11 +531,13 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> N Metric unit (e.g. "Seconds", MetricUnit.Seconds) value : float Metric value + resolution : MetricResolution + Metric resolution (e.g. 
60, MetricResolution.Standard) """ if len(self.metric_set) > 0: logger.debug(f"Metric {name} already set, skipping...") return - return super().add_metric(name, unit, value) + return super().add_metric(name, unit, value, resolution) @contextmanager @@ -466,6 +545,7 @@ def single_metric( name: str, unit: MetricUnit, value: float, + resolution: Union[MetricResolution, int] = 60, namespace: Optional[str] = None, default_dimensions: Optional[Dict[str, str]] = None, ) -> Generator[SingleMetric, None, None]: @@ -477,8 +557,9 @@ def single_metric( from aws_lambda_powertools import single_metric from aws_lambda_powertools.metrics import MetricUnit + from aws_lambda_powertools.metrics import MetricResolution - with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ServerlessAirline") as metric: + with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, resolution=MetricResolution.Standard, namespace="ServerlessAirline") as metric: # noqa E501 metric.add_dimension(name="function_version", value="47") **Same as above but set namespace using environment variable** @@ -487,8 +568,9 @@ def single_metric( from aws_lambda_powertools import single_metric from aws_lambda_powertools.metrics import MetricUnit + from aws_lambda_powertools.metrics import MetricResolution - with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1) as metric: + with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, resolution=MetricResolution.Standard) as metric: # noqa E501 metric.add_dimension(name="function_version", value="47") Parameters @@ -497,6 +579,8 @@ def single_metric( Metric name unit : MetricUnit `aws_lambda_powertools.helper.models.MetricUnit` + resolution : MetricResolution + `aws_lambda_powertools.helper.models.MetricResolution` value : float Metric value namespace: str @@ -511,6 +595,8 @@ def single_metric( ------ MetricUnitError When metric metric isn't supported by CloudWatch + MetricResolutionError + When metric resolution 
isn't supported by CloudWatch MetricValueError When metric value isn't a number SchemaValidationError @@ -519,7 +605,7 @@ def single_metric( metric_set: Optional[Dict] = None try: metric: SingleMetric = SingleMetric(namespace=namespace) - metric.add_metric(name=name, unit=unit, value=value) + metric.add_metric(name=name, unit=unit, value=value, resolution=resolution) if default_dimensions: for dim_name, dim_value in default_dimensions.items(): diff --git a/aws_lambda_powertools/metrics/exceptions.py b/aws_lambda_powertools/metrics/exceptions.py index 0376c55a40e..94f492d14d7 100644 --- a/aws_lambda_powertools/metrics/exceptions.py +++ b/aws_lambda_powertools/metrics/exceptions.py @@ -4,6 +4,12 @@ class MetricUnitError(Exception): pass +class MetricResolutionError(Exception): + """When metric resolution is not supported by CloudWatch""" + + pass + + class SchemaValidationError(Exception): """When serialization fail schema validation""" diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 43a45ff885d..085ebf9053f 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -50,7 +50,9 @@ def lambda_handler(): Raises ------ MetricUnitError - When metric metric isn't supported by CloudWatch + When metric unit isn't supported by CloudWatch + MetricResolutionError + When metric resolution isn't supported by CloudWatch MetricValueError When metric value isn't a number SchemaValidationError diff --git a/aws_lambda_powertools/metrics/types.py b/aws_lambda_powertools/metrics/types.py new file mode 100644 index 00000000000..76fcf7bd18a --- /dev/null +++ b/aws_lambda_powertools/metrics/types.py @@ -0,0 +1,7 @@ +from typing_extensions import NotRequired, TypedDict + + +class MetricNameUnitResolution(TypedDict): + Name: str + Unit: str + StorageResolution: NotRequired[int] diff --git a/docs/core/metrics.md b/docs/core/metrics.md index ca42b632f84..f4bf54cced8 100644 --- 
a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -20,6 +20,9 @@ If you're new to Amazon CloudWatch, there are two terminologies you must be awar * **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`. * **Dimensions**. Metrics metadata in key-value format. They help you slice and dice metrics visualization, for example `ColdStart` metric by Payment `service`. +* **Metric**. It's the name of the metric, for example: `SuccessfulBooking` or `UpdatedBooking`. +* **Unit**. It's a value representing the unit of measure for the corresponding metric, for example: `Count` or `Seconds`. +* **Resolution**. It's a value representing the storage resolution for the corresponding metric. Metrics can be either Standard or High resolution. Read more [here](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/publishingMetrics.html#high-resolution-metrics).
@@ -78,6 +81,22 @@ You can create metrics using `add_metric`, and you can create dimensions for all ???+ warning "Warning: Do not create metrics or dimensions outside the handler" Metrics or dimensions added in the global scope will only be added during cold start. Disregard if you that's the intended behavior. +### Adding high-resolution metrics + +You can create [high-resolution metrics](https://aws.amazon.com/pt/about-aws/whats-new/2023/02/amazon-cloudwatch-high-resolution-metric-extraction-structured-logs/) passing `resolution` parameter to `add_metric`. + +???+ tip "High-resolution metrics - when is it useful?" + High-resolution metrics are data with a granularity of one second and are very useful in several situations such as telemetry, time series, real-time incident management, and others. + +=== "add_high_resolution_metrics.py" + + ```python hl_lines="10" + --8<-- "examples/metrics/src/add_high_resolution_metric.py" + ``` + +???+ tip "Tip: Autocomplete Metric Resolutions" + `MetricResolution` enum facilitates finding a supported metric resolution by CloudWatch. Alternatively, you can pass the values 1 or 60 (must be one of them) as an integer _e.g. `resolution=1`_. + ### Adding multi-value metrics You can call `add_metric()` with the same metric name multiple times. The values will be grouped together in a list. 
diff --git a/examples/metrics/src/add_high_resolution_metric.py b/examples/metrics/src/add_high_resolution_metric.py new file mode 100644 index 00000000000..633fb114231 --- /dev/null +++ b/examples/metrics/src/add_high_resolution_metric.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricResolution, MetricUnit +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = Metrics() + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1, resolution=MetricResolution.High) diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py index d15b105057e..2a53b42cd16 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/test_metrics.py @@ -1,13 +1,15 @@ import json import warnings from collections import namedtuple -from typing import Any, Dict, List +from typing import Any, Dict, List, Union import pytest from aws_lambda_powertools import Metrics, single_metric from aws_lambda_powertools.metrics import ( EphemeralMetrics, + MetricResolution, + MetricResolutionError, MetricUnit, MetricUnitError, MetricValueError, @@ -29,6 +31,11 @@ def reset_metric_set(): yield +@pytest.fixture +def metric_with_resolution() -> Dict[str, Union[str, int]]: + return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1, "resolution": MetricResolution.High} + + @pytest.fixture def metric() -> Dict[str, str]: return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1} @@ -141,6 +148,36 @@ def capture_metrics_output_multiple_emf_objects(capsys): return [json.loads(line.strip()) for line in capsys.readouterr().out.split("\n") if line] +def test_single_metric_logs_with_high_resolution_enum(capsys, metric_with_resolution, dimension, namespace): + # GIVEN we have a metric with high resolution as enum + # 
WHEN using single_metric context manager + with single_metric(namespace=namespace, **metric_with_resolution) as my_metric: + my_metric.add_dimension(**dimension) + + # THEN we should only have the first metric added + output = capture_metrics_output(capsys) + expected = serialize_single_metric(metric=metric_with_resolution, dimension=dimension, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) + assert expected == output + + +def test_single_metric_logs_with_high_resolution_integer(capsys, metric_with_resolution, dimension, namespace): + # GIVEN we have a metric with high resolution as integer + metric_with_resolution["resolution"] = MetricResolution.High.value + + # WHEN using single_metric context manager + with single_metric(namespace=namespace, **metric_with_resolution) as my_metric: + my_metric.add_dimension(**dimension) + + # THEN we should only have the first metric added + output = capture_metrics_output(capsys) + expected = serialize_single_metric(metric=metric_with_resolution, dimension=dimension, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) + assert expected == output + + def test_single_metric_logs_one_metric_only(capsys, metric, dimension, namespace): # GIVEN we try adding more than one metric # WHEN using single_metric context manager @@ -343,6 +380,29 @@ def lambda_handler(evt, context): assert lambda_handler({}, {}) is True + +def test_schema_validation_incorrect_metric_resolution(metric, dimension): + # GIVEN we pass a metric resolution that is not supported by CloudWatch + metric["resolution"] = 10 # metric resolution must be 1 (High) or 60 (Standard) + + # WHEN we try adding a new metric + # THEN it should fail metric resolution validation + with pytest.raises(MetricResolutionError, match="Invalid metric resolution.*60"): + with single_metric(**metric) as my_metric: + my_metric.add_dimension(**dimension) + + +@pytest.mark.parametrize("resolution", ["sixty", False, [], {}, object]) +def 
test_schema_validation_incorrect_metric_resolution_non_integer_enum(metric, dimension, resolution, namespace): + # GIVEN we pass a metric resolution that is not supported by CloudWatch + metric["resolution"] = resolution # metric resolution must be 1 (High) or 60 (Standard) + + # WHEN we try adding a new metric + # THEN it should fail metric resolution validation + with pytest.raises(MetricResolutionError, match="Invalid metric resolution.*60"): + with single_metric(namespace=namespace, **metric) as my_metric: + my_metric.add_dimension(**dimension) + + def test_schema_validation_incorrect_metric_unit(metric, dimension, namespace): # GIVEN we pass a metric unit that is not supported by CloudWatch metric["unit"] = "incorrect_unit" @@ -749,6 +809,41 @@ def lambda_handler(evt, ctx): assert expected == output + +def test_serialize_high_resolution_metric_set_metric_definition( + metric_with_resolution, dimension, namespace, service, metadata +): + expected_metric_definition = { + "single_metric": [1.0], + "_aws": { + "Timestamp": 1592237875494, + "CloudWatchMetrics": [ + { + "Namespace": "test_namespace", + "Dimensions": [["test_dimension", "service"]], + "Metrics": [{"Name": "single_metric", "Unit": "Count", "StorageResolution": 1}], + } + ], + }, + "service": "test_service", + "username": "test", + "test_dimension": "test", + } + + # GIVEN Metrics is initialized + my_metrics = Metrics(service=service, namespace=namespace) + my_metrics.add_metric(**metric_with_resolution) + my_metrics.add_dimension(**dimension) + my_metrics.add_metadata(**metadata) + + # WHEN metrics are serialized manually + metric_definition_output = my_metrics.serialize_metric_set() + + # THEN we should emit a valid embedded metric definition object + assert "Timestamp" in metric_definition_output["_aws"] + remove_timestamp(metrics=[metric_definition_output, expected_metric_definition]) + assert metric_definition_output == expected_metric_definition + + +def test_serialize_metric_set_metric_definition(metric, 
dimension, namespace, service, metadata): expected_metric_definition = { "single_metric": [1.0], From dfad50b81a5b9b85167eb0299221faec19f401ce Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 10 Feb 2023 17:19:15 +0100 Subject: [PATCH 26/35] docs(metrics): fix syntax highlighting for new default_dimensions --- docs/core/metrics.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/core/metrics.md b/docs/core/metrics.md index f4bf54cced8..3a4dd67e878 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -237,9 +237,9 @@ CloudWatch EMF uses the same dimensions across all your metrics. Use `single_met By default it will skip all previously defined dimensions including default dimensions. Use `default_dimensions` keyword argument if you want to reuse default dimensions or specify custom dimensions from a dictionary. -=== "single_metric_default_dimensions_inherit.json" +=== "single_metric_default_dimensions_inherit.py" - ```json hl_lines="10 15" + ```python hl_lines="10 15" --8<-- "examples/metrics/src/single_metric_default_dimensions_inherit.py" ``` From 56a1e832d2c82897104f138542f477ecd9e041ff Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 10 Feb 2023 16:19:54 +0000 Subject: [PATCH 27/35] update changelog with latest changes --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c7be91807be..2e59503c890 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ * **homepage:** set url for end-of-support in announce block ([#1893](https://github.com/awslabs/aws-lambda-powertools-python/issues/1893)) * **idempotency:** add IAM permissions section ([#1902](https://github.com/awslabs/aws-lambda-powertools-python/issues/1902)) +* **metrics:** fix syntax highlighting for new default_dimensions ## Features From d69953a14bd805f599cda5be1e1b6d9d02bf4f77 Mon Sep 17 00:00:00 2001 From: Marc Ramirez Invernon Date: Fri, 10 Feb 2023 17:35:18 +0100 Subject: [PATCH 28/35] docs(homepage): 
Replace poetry command to add group parameter (#1917) Co-authored-by: Leandro Damascena --- docs/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.md b/docs/index.md index 36b6906ceb3..8e3ee4a915c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -49,7 +49,7 @@ Powertools relies on the AWS SDK bundled in the Lambda runtime. This helps us ac This means you need to add AWS SDK as a development dependency (not as a production dependency). * **Pip**: `pip install "aws-lambda-powertools[aws-sdk]"` -* **Poetry**: `poetry add "aws-lambda-powertools[aws-sdk]" --dev` +* **Poetry**: `poetry add "aws-lambda-powertools[aws-sdk]" --group dev` * **Pipenv**: `pipenv install --dev "aws-lambda-powertools[aws-sdk]"` ???+ note "Local emulation" From 3f0024a6253a54bf28885341a2cb279ade25912c Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 10 Feb 2023 16:35:49 +0000 Subject: [PATCH 29/35] update changelog with latest changes --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e59503c890..1a01bd2d7c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ ## Documentation +* **homepage:** Replace poetry command to add group parameter ([#1917](https://github.com/awslabs/aws-lambda-powertools-python/issues/1917)) * **homepage:** set url for end-of-support in announce block ([#1893](https://github.com/awslabs/aws-lambda-powertools-python/issues/1893)) * **idempotency:** add IAM permissions section ([#1902](https://github.com/awslabs/aws-lambda-powertools-python/issues/1902)) * **metrics:** fix syntax highlighting for new default_dimensions From 332abb668be2f1e78ea68ef942ed65f221667a4c Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 10 Feb 2023 17:41:19 +0100 Subject: [PATCH 30/35] docs(metrics): remove reduntant wording before release --- docs/core/metrics.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 
3a4dd67e878..cb1712676b8 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -85,7 +85,7 @@ You can create metrics using `add_metric`, and you can create dimensions for all You can create [high-resolution metrics](https://aws.amazon.com/pt/about-aws/whats-new/2023/02/amazon-cloudwatch-high-resolution-metric-extraction-structured-logs/) passing `resolution` parameter to `add_metric`. -???+ tip "High-resolution metrics - when is it useful?" +???+ tip "When is it useful?" High-resolution metrics are data with a granularity of one second and are very useful in several situations such as telemetry, time series, real-time incident management, and others. === "add_high_resolution_metrics.py" From 733db103b99fe0462213f9614664348bc5517601 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 10 Feb 2023 16:41:51 +0000 Subject: [PATCH 31/35] update changelog with latest changes --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1a01bd2d7c7..1dd0f321fd5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ * **homepage:** Replace poetry command to add group parameter ([#1917](https://github.com/awslabs/aws-lambda-powertools-python/issues/1917)) * **homepage:** set url for end-of-support in announce block ([#1893](https://github.com/awslabs/aws-lambda-powertools-python/issues/1893)) * **idempotency:** add IAM permissions section ([#1902](https://github.com/awslabs/aws-lambda-powertools-python/issues/1902)) +* **metrics:** remove reduntant wording before release * **metrics:** fix syntax highlighting for new default_dimensions ## Features From 5229ac360318da37c757b5aba0788dbbabbc489e Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 10 Feb 2023 17:46:58 +0100 Subject: [PATCH 32/35] docs(engine): re-enable clipboard button for code snippets --- mkdocs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/mkdocs.yml b/mkdocs.yml index c4fcb0d7c51..f0a0f3b9681 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ 
-58,6 +58,7 @@ theme: - navigation.indexes - navigation.tracking - content.code.annotate + - content.code.copy icon: repo: fontawesome/brands/github logo: media/aws-logo-light.svg From ddc64af4f68149f9ebd036258325234cc64d686f Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 10 Feb 2023 16:47:26 +0000 Subject: [PATCH 33/35] update changelog with latest changes --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1dd0f321fd5..27ca3031ed5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ ## Documentation +* **engine:** re-enable clipboard button for code snippets * **homepage:** Replace poetry command to add group parameter ([#1917](https://github.com/awslabs/aws-lambda-powertools-python/issues/1917)) * **homepage:** set url for end-of-support in announce block ([#1893](https://github.com/awslabs/aws-lambda-powertools-python/issues/1893)) * **idempotency:** add IAM permissions section ([#1902](https://github.com/awslabs/aws-lambda-powertools-python/issues/1902)) From a0553881658bdd29299b1efcfa6b1362c00ded70 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 10 Feb 2023 17:12:06 +0000 Subject: [PATCH 34/35] bump version to 2.8.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e8fdc91ca0c..dbef79c46ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "2.7.1" +version = "2.8.0" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." 
authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] From 43feb1eeb76dcd113d265a0bd0de61e3888386a6 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 10 Feb 2023 17:38:08 +0000 Subject: [PATCH 35/35] chore: update v2 layer ARN on documentation --- docs/index.md | 120 +++++++++++++++++++++++++------------------------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/docs/index.md b/docs/index.md index 8e3ee4a915c..ab7bedbda66 100644 --- a/docs/index.md +++ b/docs/index.md @@ -26,8 +26,8 @@ A suite of utilities for AWS Lambda functions to ease adopting best practices su Powertools is available in the following formats: -* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:20**](#){: .copyMe}:clipboard: -* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20**](#){: .copyMe}:clipboard: +* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:21**](#){: .copyMe}:clipboard: +* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21**](#){: .copyMe}:clipboard: * **PyPi**: **`pip install "aws-lambda-powertools"`** ???+ info "Some utilities require additional dependencies" @@ -67,55 +67,55 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. 
| Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `eu-west-2` | 
[arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:20](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | 
`ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:21](#){: .copyMe}:clipboard: | === "arm64" | Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------------- | - | `af-south-1` | 
[arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `eu-west-3` | 
[arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | 
[arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21](#){: .copyMe}:clipboard: | ??? note "Note: Click to expand and copy code snippets for popular frameworks" @@ -128,7 +128,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. 
Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:20 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:21 ``` === "Serverless framework" @@ -138,7 +138,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:20 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:21 ``` === "CDK" @@ -154,7 +154,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:20" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:21" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -203,7 +203,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:20"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:21"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -256,7 +256,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20 + ? 
Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21 ❯ amplify push -y @@ -267,7 +267,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:20 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:21 ? Do you want to edit the local lambda function now? No ``` @@ -276,7 +276,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:20 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:21 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. @@ -291,7 +291,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. Properties: Architectures: [arm64] Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21 ``` === "Serverless framework" @@ -302,7 +302,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. 
handler: lambda_function.lambda_handler architecture: arm64 layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21 ``` === "CDK" @@ -318,7 +318,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -368,7 +368,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21"] architectures = ["arm64"] source_code_hash = filebase64sha256("lambda_function_payload.zip") @@ -424,7 +424,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21 ❯ amplify push -y @@ -435,7 +435,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? 
Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21 ? Do you want to edit the local lambda function now? No ``` @@ -443,7 +443,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:20 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:21 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key.