diff --git a/.github/scripts/label_pr_based_on_title.js b/.github/scripts/label_pr_based_on_title.js
index 95c33841f92..e2e208c2d78 100644
--- a/.github/scripts/label_pr_based_on_title.js
+++ b/.github/scripts/label_pr_based_on_title.js
@@ -17,6 +17,10 @@ module.exports = async ({github, context, core}) => {
"deprecated": DEPRECATED_REGEX,
}
+ // get PR labels from env
+ const prLabels = process.env.PR_LABELS.replaceAll("\"", "").split(",");
+ const labelKeys = Object.keys(labels);
+
// Maintenance: We should keep track of modified PRs in case their titles change
let miss = 0;
try {
@@ -26,6 +30,18 @@ module.exports = async ({github, context, core}) => {
if (matches != null) {
core.info(`Auto-labeling PR ${PR_NUMBER} with ${label}`)
+ for (const prLabel of prLabels) {
+ if (labelKeys.includes(prLabel) && prLabel !== label) {
+ core.info(`PR previously tagged with: ${prLabel}, removing.`);
+ await github.rest.issues.removeLabel({
+ issue_number: PR_NUMBER,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ name: prLabel
+ })
+ }
+ }
+
await github.rest.issues.addLabels({
issue_number: PR_NUMBER,
owner: context.repo.owner,
diff --git a/.github/scripts/save_pr_details.js b/.github/scripts/save_pr_details.js
index 83bd3bf70d4..ba2de975b3c 100644
--- a/.github/scripts/save_pr_details.js
+++ b/.github/scripts/save_pr_details.js
@@ -1,9 +1,19 @@
-module.exports = async ({context, core}) => {
+module.exports = async ({github, context, core}) => {
const fs = require('fs');
const filename = "pr.txt";
+ const labelsData = await github.rest.issues.listLabelsOnIssue({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: (context.payload.issue || context.payload.pull_request || context.payload).number,
+ });
+
+ const labels = labelsData.data.map((label) => {
+ return label['name'];
+ });
+
try {
- fs.writeFileSync(`./${filename}`, JSON.stringify(context.payload));
+ fs.writeFileSync(`./${filename}`, JSON.stringify({...context.payload, ...{labels:labels.join(",")}}));
return `PR successfully saved ${filename}`
} catch (err) {
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
index b9771119697..56e2cdb1ce2 100644
--- a/.github/workflows/dependency-review.yml
+++ b/.github/workflows/dependency-review.yml
@@ -19,4 +19,4 @@ jobs:
- name: 'Checkout Repository'
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
- name: 'Dependency Review'
- uses: actions/dependency-review-action@f6fff72a3217f580d5afd49a46826795305b63c7 # v3.0.8
+ uses: actions/dependency-review-action@6c5ccdad469c9f8a2996bfecaec55a631a347034 # v3.1.0
diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml
index e875e6eb92c..507ca9db843 100644
--- a/.github/workflows/publish_v2_layer.yml
+++ b/.github/workflows/publish_v2_layer.yml
@@ -117,14 +117,14 @@ jobs:
pip install --require-hashes -r requirements.txt
- name: Set up QEMU
- uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # v2.0.0
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
with:
platforms: arm64
# NOTE: we need QEMU to build Layer against a different architecture (e.g., ARM)
- name: Set up Docker Buildx
id: builder
- uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # v2.10.0
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
with:
install: true
driver: docker
diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml
index d942a156950..ad29d4c9bf1 100644
--- a/.github/workflows/reusable_export_pr_details.yml
+++ b/.github/workflows/reusable_export_pr_details.yml
@@ -49,6 +49,9 @@ on:
prIsMerged:
description: "Whether PR is merged"
value: ${{ jobs.export_pr_details.outputs.prIsMerged }}
+ prLabels:
+ description: "PR Labels"
+ value: ${{ jobs.export_pr_details.outputs.prLabels }}
permissions:
contents: read
@@ -70,6 +73,7 @@ jobs:
prAuthor: ${{ steps.prAuthor.outputs.prAuthor }}
prAction: ${{ steps.prAction.outputs.prAction }}
prIsMerged: ${{ steps.prIsMerged.outputs.prIsMerged }}
+ prLabels: ${{ steps.prLabels.outputs.prLabels }}
steps:
- name: Checkout repository # in case caller workflow doesn't checkout thus failing with file not found
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
@@ -106,3 +110,6 @@ jobs:
- name: "Export Pull Request Merged status"
id: prIsMerged
run: echo prIsMerged="$(jq -c '.pull_request.merged' "${FILENAME}")" >> "$GITHUB_OUTPUT"
+ - name: "Export Pull Request labels"
+ id: prLabels
+ run: echo prLabels="$(jq -c '.labels' "${FILENAME}")" >> "$GITHUB_OUTPUT"
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 988da68ea70..bbb3c495d97 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,38 @@
# Unreleased
+## Code Refactoring
+
+* **parameters:** BaseProvider._get to also support Dict ([#3090](https://github.com/aws-powertools/powertools-lambda-python/issues/3090))
+
+## Features
+
+* **event_source:** add Kinesis Firehose Data Transformation data class ([#3029](https://github.com/aws-powertools/powertools-lambda-python/issues/3029))
+* **event_sources:** add Secrets Manager secret rotation event ([#3061](https://github.com/aws-powertools/powertools-lambda-python/issues/3061))
+
+## Maintenance
+
+* **automation:** remove previous labels when PR is updated ([#3066](https://github.com/aws-powertools/powertools-lambda-python/issues/3066))
+* **deps:** bump actions/dependency-review-action from 3.0.8 to 3.1.0 ([#3071](https://github.com/aws-powertools/powertools-lambda-python/issues/3071))
+* **deps:** bump docker/setup-qemu-action from 2.2.0 to 3.0.0 ([#3081](https://github.com/aws-powertools/powertools-lambda-python/issues/3081))
+* **deps:** bump docker/setup-buildx-action from 2.10.0 to 3.0.0 ([#3083](https://github.com/aws-powertools/powertools-lambda-python/issues/3083))
+* **deps:** bump squidfunk/mkdocs-material from `dd1770c` to `c4890ab` in /docs ([#3078](https://github.com/aws-powertools/powertools-lambda-python/issues/3078))
+* **deps-dev:** bump cfn-lint from 0.79.9 to 0.79.10 ([#3077](https://github.com/aws-powertools/powertools-lambda-python/issues/3077))
+* **deps-dev:** bump hvac from 1.2.0 to 1.2.1 ([#3075](https://github.com/aws-powertools/powertools-lambda-python/issues/3075))
+* **deps-dev:** bump ruff from 0.0.288 to 0.0.289 ([#3080](https://github.com/aws-powertools/powertools-lambda-python/issues/3080))
+* **deps-dev:** bump ruff from 0.0.287 to 0.0.288 ([#3076](https://github.com/aws-powertools/powertools-lambda-python/issues/3076))
+* **deps-dev:** bump aws-cdk from 2.95.0 to 2.95.1 ([#3074](https://github.com/aws-powertools/powertools-lambda-python/issues/3074))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3085](https://github.com/aws-powertools/powertools-lambda-python/issues/3085))
+* **deps-dev:** bump aws-cdk from 2.95.1 to 2.96.0 ([#3087](https://github.com/aws-powertools/powertools-lambda-python/issues/3087))
+* **deps-dev:** bump sentry-sdk from 1.30.0 to 1.31.0 ([#3086](https://github.com/aws-powertools/powertools-lambda-python/issues/3086))
+* **deps-dev:** bump aws-cdk from 2.94.0 to 2.95.0 ([#3070](https://github.com/aws-powertools/powertools-lambda-python/issues/3070))
+* **deps-dev:** bump cfn-lint from 0.79.10 to 0.79.11 ([#3088](https://github.com/aws-powertools/powertools-lambda-python/issues/3088))
+* **deps-dev:** bump aws-cdk from 2.96.0 to 2.96.1 ([#3093](https://github.com/aws-powertools/powertools-lambda-python/issues/3093))
+* **typing:** move backwards compat types to shared types ([#3092](https://github.com/aws-powertools/powertools-lambda-python/issues/3092))
+
+
+
+## [v2.24.0] - 2023-09-08
## Bug Fixes
* **event_handler:** expanding safe URI characters to include +$& ([#3026](https://github.com/aws-powertools/powertools-lambda-python/issues/3026))
@@ -13,28 +45,42 @@
* **batch:** type response() method ([#3023](https://github.com/aws-powertools/powertools-lambda-python/issues/3023))
+## Documentation
+
+* **event_handler:** demonstrate how to combine logger correlation ID and middleware ([#3064](https://github.com/aws-powertools/powertools-lambda-python/issues/3064))
+* **event_handler:** use correct correlation_id for logger in middleware example ([#3063](https://github.com/aws-powertools/powertools-lambda-python/issues/3063))
+* **idempotency:** use tab navigation, improves custom serializer example, and additional explanations ([#3067](https://github.com/aws-powertools/powertools-lambda-python/issues/3067))
+
## Features
+* **event_handler:** add Middleware support for REST Event Handler ([#2917](https://github.com/aws-powertools/powertools-lambda-python/issues/2917))
* **idempotency:** add support to custom serialization/deserialization on idempotency decorator ([#2951](https://github.com/aws-powertools/powertools-lambda-python/issues/2951))
## Maintenance
+* version bump
* **deps:** bump squidfunk/mkdocs-material from `b1f7f94` to `f4764d1` in /docs ([#3031](https://github.com/aws-powertools/powertools-lambda-python/issues/3031))
+* **deps:** bump gitpython from 3.1.32 to 3.1.35 in /docs ([#3059](https://github.com/aws-powertools/powertools-lambda-python/issues/3059))
+* **deps:** bump squidfunk/mkdocs-material from `f4764d1` to `dd1770c` in /docs ([#3044](https://github.com/aws-powertools/powertools-lambda-python/issues/3044))
* **deps:** bump actions/checkout from 3.6.0 to 4.0.0 ([#3041](https://github.com/aws-powertools/powertools-lambda-python/issues/3041))
* **deps:** bump squidfunk/mkdocs-material from `97da15b` to `b1f7f94` in /docs ([#3021](https://github.com/aws-powertools/powertools-lambda-python/issues/3021))
-* **deps:** bump squidfunk/mkdocs-material from `f4764d1` to `dd1770c` in /docs ([#3044](https://github.com/aws-powertools/powertools-lambda-python/issues/3044))
* **deps:** bump docker/setup-buildx-action from 2.9.1 to 2.10.0 ([#3022](https://github.com/aws-powertools/powertools-lambda-python/issues/3022))
+* **deps:** bump actions/upload-artifact from 3.1.2 to 3.1.3 ([#3053](https://github.com/aws-powertools/powertools-lambda-python/issues/3053))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3052](https://github.com/aws-powertools/powertools-lambda-python/issues/3052))
* **deps-dev:** bump mkdocs-material from 9.2.6 to 9.2.7 ([#3043](https://github.com/aws-powertools/powertools-lambda-python/issues/3043))
+* **deps-dev:** bump cfn-lint from 0.79.7 to 0.79.8 ([#3033](https://github.com/aws-powertools/powertools-lambda-python/issues/3033))
+* **deps-dev:** bump mkdocs-material from 9.2.5 to 9.2.6 ([#3032](https://github.com/aws-powertools/powertools-lambda-python/issues/3032))
+* **deps-dev:** bump ruff from 0.0.286 to 0.0.287 ([#3035](https://github.com/aws-powertools/powertools-lambda-python/issues/3035))
* **deps-dev:** bump sentry-sdk from 1.29.2 to 1.30.0 ([#3028](https://github.com/aws-powertools/powertools-lambda-python/issues/3028))
* **deps-dev:** bump the boto-typing group with 11 updates ([#3027](https://github.com/aws-powertools/powertools-lambda-python/issues/3027))
-* **deps-dev:** bump pytest from 7.4.0 to 7.4.1 ([#3042](https://github.com/aws-powertools/powertools-lambda-python/issues/3042))
-* **deps-dev:** bump mkdocs-material from 9.2.5 to 9.2.6 ([#3032](https://github.com/aws-powertools/powertools-lambda-python/issues/3032))
-* **deps-dev:** bump cfn-lint from 0.79.7 to 0.79.8 ([#3033](https://github.com/aws-powertools/powertools-lambda-python/issues/3033))
+* **deps-dev:** bump pytest from 7.4.1 to 7.4.2 ([#3057](https://github.com/aws-powertools/powertools-lambda-python/issues/3057))
+* **deps-dev:** bump hvac from 1.1.1 to 1.2.0 ([#3054](https://github.com/aws-powertools/powertools-lambda-python/issues/3054))
+* **deps-dev:** bump cfn-lint from 0.79.8 to 0.79.9 ([#3046](https://github.com/aws-powertools/powertools-lambda-python/issues/3046))
* **deps-dev:** bump the boto-typing group with 1 update ([#3013](https://github.com/aws-powertools/powertools-lambda-python/issues/3013))
-* **deps-dev:** bump aws-cdk from 2.93.0 to 2.94.0 ([#3036](https://github.com/aws-powertools/powertools-lambda-python/issues/3036))
+* **deps-dev:** bump pytest from 7.4.0 to 7.4.1 ([#3042](https://github.com/aws-powertools/powertools-lambda-python/issues/3042))
* **deps-dev:** bump ruff from 0.0.285 to 0.0.286 ([#3014](https://github.com/aws-powertools/powertools-lambda-python/issues/3014))
-* **deps-dev:** bump cfn-lint from 0.79.8 to 0.79.9 ([#3046](https://github.com/aws-powertools/powertools-lambda-python/issues/3046))
-* **deps-dev:** bump ruff from 0.0.286 to 0.0.287 ([#3035](https://github.com/aws-powertools/powertools-lambda-python/issues/3035))
+* **deps-dev:** bump gitpython from 3.1.32 to 3.1.35 ([#3060](https://github.com/aws-powertools/powertools-lambda-python/issues/3060))
+* **deps-dev:** bump aws-cdk from 2.93.0 to 2.94.0 ([#3036](https://github.com/aws-powertools/powertools-lambda-python/issues/3036))
@@ -3753,7 +3799,8 @@
* Merge pull request [#5](https://github.com/aws-powertools/powertools-lambda-python/issues/5) from jfuss/feat/python38
-[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.23.1...HEAD
+[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.24.0...HEAD
+[v2.24.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.23.1...v2.24.0
[v2.23.1]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.23.0...v2.23.1
[v2.23.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.22.0...v2.23.0
[v2.22.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.21.0...v2.22.0
diff --git a/aws_lambda_powertools/event_handler/middlewares/base.py b/aws_lambda_powertools/event_handler/middlewares/base.py
index 32a4486bb31..fb4bf37cc74 100644
--- a/aws_lambda_powertools/event_handler/middlewares/base.py
+++ b/aws_lambda_powertools/event_handler/middlewares/base.py
@@ -1,10 +1,9 @@
from abc import ABC, abstractmethod
from typing import Generic
-from typing_extensions import Protocol
-
from aws_lambda_powertools.event_handler.api_gateway import Response
from aws_lambda_powertools.event_handler.types import EventHandlerInstance
+from aws_lambda_powertools.shared.types import Protocol
class NextMiddleware(Protocol):
diff --git a/aws_lambda_powertools/logging/types.py b/aws_lambda_powertools/logging/types.py
index ede369491f1..d166b2e023c 100644
--- a/aws_lambda_powertools/logging/types.py
+++ b/aws_lambda_powertools/logging/types.py
@@ -1,24 +1,9 @@
from __future__ import annotations
-import sys
-
-if sys.version_info >= (3, 11):
- from typing import NotRequired
-else:
- from typing_extensions import NotRequired
-
-if sys.version_info >= (3, 8):
- from typing import TypedDict
-else:
- from typing_extensions import TypedDict
-
-if sys.version_info >= (3, 10):
- from typing import TypeAlias
-else:
- from typing_extensions import TypeAlias
-
from typing import Any, Dict, List, Union
+from aws_lambda_powertools.shared.types import NotRequired, TypeAlias, TypedDict
+
LogRecord: TypeAlias = Union[Dict[str, Any], "PowertoolsLogRecord"]
diff --git a/aws_lambda_powertools/metrics/functions.py b/aws_lambda_powertools/metrics/functions.py
index d951c0749a3..e259826f1a7 100644
--- a/aws_lambda_powertools/metrics/functions.py
+++ b/aws_lambda_powertools/metrics/functions.py
@@ -1,12 +1,11 @@
from __future__ import annotations
-from typing import List
-
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.exceptions import (
MetricResolutionError,
MetricUnitError,
)
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit
+from aws_lambda_powertools.shared.types import List
def extract_cloudwatch_metric_resolution_value(metric_resolutions: List, resolution: int | MetricResolution) -> int:
diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/types.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/types.py
index bf3a48ea13f..359fdc4ee6c 100644
--- a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/types.py
+++ b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/types.py
@@ -1,6 +1,4 @@
-from typing import List
-
-from typing_extensions import NotRequired, TypedDict
+from aws_lambda_powertools.shared.types import List, NotRequired, TypedDict
class CloudWatchEMFMetric(TypedDict):
diff --git a/aws_lambda_powertools/metrics/types.py b/aws_lambda_powertools/metrics/types.py
index 76fcf7bd18a..d9eea6fe51e 100644
--- a/aws_lambda_powertools/metrics/types.py
+++ b/aws_lambda_powertools/metrics/types.py
@@ -1,4 +1,4 @@
-from typing_extensions import NotRequired, TypedDict
+from aws_lambda_powertools.shared.types import NotRequired, TypedDict
class MetricNameUnitResolution(TypedDict):
diff --git a/aws_lambda_powertools/shared/__init__.py b/aws_lambda_powertools/shared/__init__.py
index e69de29bb2d..d68e37349b7 100644
--- a/aws_lambda_powertools/shared/__init__.py
+++ b/aws_lambda_powertools/shared/__init__.py
@@ -0,0 +1 @@
+"""Internal shared functions. Do not rely on it besides internal usage."""
diff --git a/aws_lambda_powertools/shared/cookies.py b/aws_lambda_powertools/shared/cookies.py
index 944bcb5dc9f..1b57d860201 100644
--- a/aws_lambda_powertools/shared/cookies.py
+++ b/aws_lambda_powertools/shared/cookies.py
@@ -1,7 +1,9 @@
from datetime import datetime
from enum import Enum
from io import StringIO
-from typing import List, Optional
+from typing import Optional
+
+from aws_lambda_powertools.shared.types import List
class SameSite(Enum):
diff --git a/aws_lambda_powertools/shared/types.py b/aws_lambda_powertools/shared/types.py
index b29c04cbe6b..633db46c587 100644
--- a/aws_lambda_powertools/shared/types.py
+++ b/aws_lambda_powertools/shared/types.py
@@ -1,14 +1,25 @@
import sys
from typing import Any, Callable, Dict, List, TypeVar, Union
-AnyCallableT = TypeVar("AnyCallableT", bound=Callable[..., Any]) # noqa: VNE001
-# JSON primitives only, mypy doesn't support recursive tho
-JSONType = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
+if sys.version_info >= (3, 8):
+ from typing import Literal, Protocol, TypedDict
+else:
+ from typing_extensions import Literal, Protocol, TypedDict
-if sys.version_info >= (3, 8):
- from typing import Protocol
+if sys.version_info >= (3, 11):
+ from typing import NotRequired
else:
- from typing_extensions import Protocol
+ from typing_extensions import NotRequired
+
+
+if sys.version_info >= (3, 10):
+ from typing import TypeAlias
+else:
+ from typing_extensions import TypeAlias
+
+AnyCallableT = TypeVar("AnyCallableT", bound=Callable[..., Any]) # noqa: VNE001
+# JSON primitives only, mypy doesn't support recursive tho
+JSONType = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
-__all__ = ["Protocol"]
+__all__ = ["Protocol", "TypedDict", "Literal", "NotRequired", "TypeAlias"]
diff --git a/aws_lambda_powertools/shared/version.py b/aws_lambda_powertools/shared/version.py
index d7408e73ac3..d4d652c19b7 100644
--- a/aws_lambda_powertools/shared/version.py
+++ b/aws_lambda_powertools/shared/version.py
@@ -1,3 +1,3 @@
"""Exposes version constant to avoid circular dependencies."""
-VERSION = "2.23.1"
+VERSION = "2.25.0"
diff --git a/aws_lambda_powertools/utilities/batch/types.py b/aws_lambda_powertools/utilities/batch/types.py
index 4b07dab51a5..40083537e04 100644
--- a/aws_lambda_powertools/utilities/batch/types.py
+++ b/aws_lambda_powertools/utilities/batch/types.py
@@ -1,10 +1,7 @@
-#
-# type specifics
-#
import sys
-from typing import List, Optional, Type, Union
+from typing import Optional, Type, Union
-from typing_extensions import TypedDict
+from aws_lambda_powertools.shared.types import List, TypedDict
has_pydantic = "pydantic" in sys.modules
diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py
index c619104fda8..99754266928 100644
--- a/aws_lambda_powertools/utilities/data_classes/__init__.py
+++ b/aws_lambda_powertools/utilities/data_classes/__init__.py
@@ -14,10 +14,16 @@
from .event_bridge_event import EventBridgeEvent
from .event_source import event_source
from .kafka_event import KafkaEvent
-from .kinesis_firehose_event import KinesisFirehoseEvent
+from .kinesis_firehose_event import (
+ KinesisFirehoseDataTransformationRecord,
+ KinesisFirehoseDataTransformationRecordMetadata,
+ KinesisFirehoseDataTransformationResponse,
+ KinesisFirehoseEvent,
+)
from .kinesis_stream_event import KinesisStreamEvent
from .lambda_function_url_event import LambdaFunctionUrlEvent
from .s3_event import S3Event, S3EventBridgeNotificationEvent
+from .secrets_manager_event import SecretsManagerEvent
from .ses_event import SESEvent
from .sns_event import SNSEvent
from .sqs_event import SQSEvent
@@ -26,6 +32,7 @@
__all__ = [
"APIGatewayProxyEvent",
"APIGatewayProxyEventV2",
+ "SecretsManagerEvent",
"AppSyncResolverEvent",
"ALBEvent",
"CloudWatchDashboardCustomWidgetEvent",
@@ -37,6 +44,9 @@
"KafkaEvent",
"KinesisFirehoseEvent",
"KinesisStreamEvent",
+ "KinesisFirehoseDataTransformationResponse",
+ "KinesisFirehoseDataTransformationRecord",
+ "KinesisFirehoseDataTransformationRecordMetadata",
"LambdaFunctionUrlEvent",
"S3Event",
"S3EventBridgeNotificationEvent",
diff --git a/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py b/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py
index 47dc196856d..dd42a09fa5e 100644
--- a/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py
@@ -1,9 +1,179 @@
import base64
-from typing import Iterator, Optional
+import json
+import warnings
+from dataclasses import dataclass, field
+from typing import Any, Callable, ClassVar, Dict, Iterator, List, Optional, Tuple
+
+from typing_extensions import Literal
from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
+@dataclass(repr=False, order=False, frozen=True)
+class KinesisFirehoseDataTransformationRecordMetadata:
+ """
+ Metadata in Firehose Data Transform Record.
+
+ Parameters
+ ----------
+ partition_keys: Dict[str, str]
+ A dict of partition keys/value in string format, e.g. `{"year":"2023","month":"09"}`
+
+ Documentation:
+ --------------
+ - https://docs.aws.amazon.com/firehose/latest/dev/dynamic-partitioning.html
+ """
+
+ partition_keys: Dict[str, str] = field(default_factory=lambda: {})
+
+ def asdict(self) -> Dict:
+ if self.partition_keys is not None:
+ return {"partitionKeys": self.partition_keys}
+ return {}
+
+
+@dataclass(repr=False, order=False)
+class KinesisFirehoseDataTransformationRecord:
+ """Record in Kinesis Data Firehose response object.
+
+ Parameters
+ ----------
+ record_id: str
+ uniquely identifies this record within the current batch
+ result: Literal["Ok", "Dropped", "ProcessingFailed"]
+ record data transformation status, whether it succeeded, should be dropped, or failed.
+ data: str
+ base64-encoded payload, by default empty string.
+
+ Use `data_from_text` or `data_from_json` methods to convert data if needed.
+
+ metadata: Optional[KinesisFirehoseDataTransformationRecordMetadata]
+ Metadata associated with this record; can contain partition keys.
+
+ See: https://docs.aws.amazon.com/firehose/latest/dev/dynamic-partitioning.html
+ json_serializer: Callable
+ function to serialize `obj` to a JSON formatted `str`, by default json.dumps
+ json_deserializer: Callable
+ function to deserialize `str`, `bytes`, `bytearray` containing a JSON document to a Python `obj`,
+ by default json.loads
+
+ Documentation:
+ --------------
+ - https://docs.aws.amazon.com/firehose/latest/dev/data-transformation.html
+ """
+
+ _valid_result_types: ClassVar[Tuple[str, str, str]] = ("Ok", "Dropped", "ProcessingFailed")
+
+ record_id: str
+ result: Literal["Ok", "Dropped", "ProcessingFailed"] = "Ok"
+ data: str = ""
+ metadata: Optional[KinesisFirehoseDataTransformationRecordMetadata] = None
+ json_serializer: Callable = json.dumps
+ json_deserializer: Callable = json.loads
+ _json_data: Optional[Any] = None
+
+ def asdict(self) -> Dict:
+ if self.result not in self._valid_result_types:
+ warnings.warn(
+ stacklevel=1,
+ message=f'The result "{self.result}" is not valid; choose from "Ok", "Dropped", "ProcessingFailed"',
+ )
+
+ record: Dict[str, Any] = {
+ "recordId": self.record_id,
+ "result": self.result,
+ "data": self.data,
+ }
+ if self.metadata:
+ record["metadata"] = self.metadata.asdict()
+ return record
+
+ @property
+ def data_as_bytes(self) -> bytes:
+ """Decoded base64-encoded data as bytes"""
+ if not self.data:
+ return b""
+ return base64.b64decode(self.data)
+
+ @property
+ def data_as_text(self) -> str:
+ """Decoded base64-encoded data as text"""
+ if not self.data:
+ return ""
+ return self.data_as_bytes.decode("utf-8")
+
+ @property
+ def data_as_json(self) -> Dict:
+ """Decoded base64-encoded data loaded to json"""
+ if not self.data:
+ return {}
+ if self._json_data is None:
+ self._json_data = self.json_deserializer(self.data_as_text)
+ return self._json_data
+
+
+@dataclass(repr=False, order=False)
+class KinesisFirehoseDataTransformationResponse:
+ """Kinesis Data Firehose response object
+
+ Documentation:
+ --------------
+ - https://docs.aws.amazon.com/firehose/latest/dev/data-transformation.html
+
+ Parameters
+ ----------
+ records : List[KinesisFirehoseDataTransformationRecord]
+ records of the Kinesis Data Firehose response object;
+ optional at creation time, records can be added later using the `add_record` method.
+
+ Examples
+ --------
+
+ **Transforming data records**
+
+ ```python
+ from aws_lambda_powertools.utilities.data_classes import (
+ KinesisFirehoseDataTransformationRecord,
+ KinesisFirehoseDataTransformationResponse,
+ KinesisFirehoseEvent,
+ )
+ from aws_lambda_powertools.utilities.serialization import base64_from_json
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+ def lambda_handler(event: dict, context: LambdaContext):
+ firehose_event = KinesisFirehoseEvent(event)
+ result = KinesisFirehoseDataTransformationResponse()
+
+ for record in firehose_event.records:
+ payload = record.data_as_text # base64 decoded data as str
+
+ ## generate data to return
+ transformed_data = {"tool_used": "powertools_dataclass", "original_payload": payload}
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id=record.record_id,
+ data=base64_from_json(transformed_data),
+ )
+
+ result.add_record(processed_record)
+
+ # return transformed records
+ return result.asdict()
+ ```
+ """
+
+ records: List[KinesisFirehoseDataTransformationRecord] = field(default_factory=list)
+
+ def add_record(self, record: KinesisFirehoseDataTransformationRecord):
+ self.records.append(record)
+
+ def asdict(self) -> Dict:
+ if not self.records:
+ raise ValueError("Amazon Kinesis Data Firehose doesn't accept empty response")
+
+ return {"records": [record.asdict() for record in self.records]}
+
+
class KinesisFirehoseRecordMetadata(DictWrapper):
@property
def _metadata(self) -> dict:
@@ -77,6 +247,32 @@ def data_as_json(self) -> dict:
self._json_data = self._json_deserializer(self.data_as_text)
return self._json_data
+ def build_data_transformation_response(
+ self,
+ result: Literal["Ok", "Dropped", "ProcessingFailed"] = "Ok",
+ data: str = "",
+ metadata: Optional[KinesisFirehoseDataTransformationRecordMetadata] = None,
+ ) -> KinesisFirehoseDataTransformationRecord:
+ """Create a KinesisFirehoseResponseRecord directly using the record_id and given values
+
+ Parameters
+ ----------
+ result : Literal["Ok", "Dropped", "ProcessingFailed"]
+ processing result; supported values: Ok, Dropped, ProcessingFailed
+ data : str, optional
+ data blob, base64-encoded, optional at init. Allows passing in base64-encoded data directly, or
+ using helpers such as `data_from_text` or `data_from_json` to populate the data
+ metadata: KinesisFirehoseDataTransformationRecordMetadata, optional
+ Metadata associated with this record; can contain partition keys
+ - https://docs.aws.amazon.com/firehose/latest/dev/dynamic-partitioning.html
+ """
+ return KinesisFirehoseDataTransformationRecord(
+ record_id=self.record_id,
+ result=result,
+ data=data,
+ metadata=metadata,
+ )
+
class KinesisFirehoseEvent(DictWrapper):
"""Kinesis Data Firehose event
diff --git a/aws_lambda_powertools/utilities/data_classes/secrets_manager_event.py b/aws_lambda_powertools/utilities/data_classes/secrets_manager_event.py
new file mode 100644
index 00000000000..1a3a1c5b7f4
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_classes/secrets_manager_event.py
@@ -0,0 +1,24 @@
+from aws_lambda_powertools.shared.types import Literal
+from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
+
+
+class SecretsManagerEvent(DictWrapper):
+ @property
+ def secret_id(self) -> str:
+ """SecretId: The secret ARN or identifier"""
+ return self["SecretId"]
+
+ @property
+ def client_request_token(self) -> str:
+ """ClientRequestToken: The ClientRequestToken associated with the secret version"""
+ return self["ClientRequestToken"]
+
+ @property
+ def version_id(self) -> str:
+ """Alias to ClientRequestToken to get token associated to version"""
+ return self["ClientRequestToken"]
+
+ @property
+ def step(self) -> Literal["createSecret", "setSecret", "testSecret", "finishSecret"]:
+ """Step: The rotation step (one of createSecret, setSecret, testSecret, or finishSecret)"""
+ return self["Step"]
diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py
index e4be9d33cdc..710634636d0 100644
--- a/aws_lambda_powertools/utilities/parameters/base.py
+++ b/aws_lambda_powertools/utilities/parameters/base.py
@@ -147,7 +147,7 @@ def get(
return value
@abstractmethod
- def _get(self, name: str, **sdk_options) -> Union[str, bytes]:
+ def _get(self, name: str, **sdk_options) -> Union[str, bytes, Dict[str, Any]]:
"""
Retrieve parameter value from the underlying parameter store
"""
diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py
index dec785b97a8..e27fedb5027 100644
--- a/aws_lambda_powertools/utilities/parameters/ssm.py
+++ b/aws_lambda_powertools/utilities/parameters/ssm.py
@@ -8,7 +8,6 @@
import boto3
from botocore.config import Config
-from typing_extensions import Literal
from aws_lambda_powertools.shared import constants
from aws_lambda_powertools.shared.functions import (
@@ -16,6 +15,7 @@
resolve_truthy_env_var_choice,
slice_dictionary,
)
+from aws_lambda_powertools.shared.types import Literal
from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider, transform_value
from .exceptions import GetParameterError
diff --git a/aws_lambda_powertools/utilities/parameters/types.py b/aws_lambda_powertools/utilities/parameters/types.py
index 6a15873c496..faa06cee89e 100644
--- a/aws_lambda_powertools/utilities/parameters/types.py
+++ b/aws_lambda_powertools/utilities/parameters/types.py
@@ -1,3 +1,3 @@
-from typing_extensions import Literal
+from aws_lambda_powertools.shared.types import Literal
TransformOptions = Literal["json", "binary", "auto", None]
diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py
index 71e560276a4..d236f4652ed 100644
--- a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py
+++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py
@@ -3,10 +3,12 @@
import logging
import zlib
from datetime import datetime
-from typing import List, Type, Union
+from typing import Type, Union
from pydantic import BaseModel, Field, validator
+from aws_lambda_powertools.shared.types import List
+
logger = logging.getLogger(__name__)
diff --git a/aws_lambda_powertools/utilities/parser/types.py b/aws_lambda_powertools/utilities/parser/types.py
index d3f00646d52..5282ccee373 100644
--- a/aws_lambda_powertools/utilities/parser/types.py
+++ b/aws_lambda_powertools/utilities/parser/types.py
@@ -1,15 +1,10 @@
"""Generics and other shared types used across parser"""
-import sys
from typing import Any, Dict, Type, TypeVar, Union
from pydantic import BaseModel, Json
-# We only need typing_extensions for python versions <3.8
-if sys.version_info >= (3, 8):
- from typing import Literal
-else:
- from typing_extensions import Literal
+from aws_lambda_powertools.shared.types import Literal
Model = TypeVar("Model", bound=BaseModel)
EnvelopeModel = TypeVar("EnvelopeModel")
diff --git a/aws_lambda_powertools/utilities/serialization.py b/aws_lambda_powertools/utilities/serialization.py
new file mode 100644
index 00000000000..ef76eec70e2
--- /dev/null
+++ b/aws_lambda_powertools/utilities/serialization.py
@@ -0,0 +1,59 @@
+"""Standalone functions to serialize/deserialize common data structures"""
+import base64
+import json
+from typing import Any, Callable
+
+
+def base64_encode(data: str) -> str:
+ """Encode a string and returns Base64-encoded encoded value.
+
+ Parameters
+ ----------
+ data: str
+ The string to encode.
+
+ Returns
+ -------
+ str
+ The Base64-encoded value.
+ """
+ return base64.b64encode(data.encode()).decode("utf-8")
+
+
+def base64_decode(data: str) -> str:
+ """Decodes a Base64-encoded string and returns the decoded value.
+
+ Parameters
+ ----------
+ data: str
+ The Base64-encoded string to decode.
+
+ Returns
+ -------
+ str
+ The decoded string value.
+ """
+ return base64.b64decode(data).decode("utf-8")
+
+
+def base64_from_str(data: str) -> str:
+ """Encode str as base64 string"""
+ return base64.b64encode(data.encode()).decode("utf-8")
+
+
+def base64_from_json(data: Any, json_serializer: Callable[..., str] = json.dumps) -> str:
+ """Encode JSON serializable data as base64 string
+
+ Parameters
+ ----------
+ data: Any
+ JSON serializable (dict, list, boolean, etc.)
+ json_serializer: Callable
+ function to serialize `obj` to a JSON formatted `str`, by default json.dumps
+
+ Returns
+ -------
+ str:
+ JSON string as base64 string
+ """
+ return base64_from_str(data=json_serializer(data))
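To show how the new standalone serialization helpers compose, here is a small round-trip example; the sample payload is invented for illustration.

```python
# Round-trip sketch for the new serialization helpers; the payload is illustrative
import json

from aws_lambda_powertools.utilities.serialization import (
    base64_decode,
    base64_encode,
    base64_from_json,
)

payload = {"tool_used": "powertools_serialization", "count": 3}

encoded = base64_from_json(payload)           # JSON-serialize, then base64-encode
decoded = json.loads(base64_decode(encoded))  # base64-decode, then JSON-deserialize
assert decoded == payload

# Plain strings go through base64_encode/base64_decode directly
assert base64_decode(base64_encode("hello")) == "hello"
```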
diff --git a/aws_lambda_powertools/utilities/streaming/s3_object.py b/aws_lambda_powertools/utilities/streaming/s3_object.py
index 7f0ce17b4ae..f7d17f7726e 100644
--- a/aws_lambda_powertools/utilities/streaming/s3_object.py
+++ b/aws_lambda_powertools/utilities/streaming/s3_object.py
@@ -15,8 +15,7 @@
overload,
)
-from typing_extensions import Literal
-
+from aws_lambda_powertools.shared.types import Literal
from aws_lambda_powertools.utilities.streaming._s3_seekable_io import _S3SeekableIO
from aws_lambda_powertools.utilities.streaming.transformations import (
CsvTransform,
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 8e3d16dc1ce..4951f424a97 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -1,5 +1,5 @@
# v9.1.18
-FROM squidfunk/mkdocs-material@sha256:dd1770cc7bcb103d9fd3618006c66aaf6c0c829e23974ec0c0a3700558c553a1
+FROM squidfunk/mkdocs-material@sha256:c4890abf881d379f57b058ff2a387ab75a30f63194ddd004420ed3341e98fe9f
# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
COPY requirements.txt /tmp/
RUN pip install --require-hashes -r /tmp/requirements.txt
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
index fc57e9b1f6c..4e4e935f699 100644
--- a/docs/core/event_handler/api_gateway.md
+++ b/docs/core/event_handler/api_gateway.md
@@ -847,7 +847,7 @@ A micro function means that your final code artifact will be different to each f
**Benefits**
* **Granular scaling**. A micro function can benefit from the [Lambda scaling model](https://docs.aws.amazon.com/lambda/latest/dg/invocation-scaling.html){target="_blank"} to scale differently depending on each part of your application. Concurrency controls and provisioned concurrency can also be used at a granular level for capacity management.
-* **Discoverability**. Micro functions are easier do visualize when using distributed tracing. Their high-level architectures can be self-explanatory, and complexity is highly visible — assuming each function is named to the business purpose it serves.
+* **Discoverability**. Micro functions are easier to visualize when using distributed tracing. Their high-level architectures can be self-explanatory, and complexity is highly visible — assuming each function is named to the business purpose it serves.
* **Package size**. An independent function can be significantly smaller (KB vs MB) depending on the external dependencies it requires to perform its purpose. Conversely, a monolithic approach can benefit from [Lambda Layers](https://docs.aws.amazon.com/lambda/latest/dg/invocation-layers.html){target="_blank"} to optimize builds for external dependencies.
**Downsides**
@@ -859,6 +859,35 @@ your development, building, deployment tooling need to accommodate the distinct
* **Slower safe deployments**. Safely deploying multiple functions requires coordination: AWS CodeDeploy deploys and verifies each function sequentially. This increases lead time substantially (minutes to hours) depending on the deployment strategy you choose. You can mitigate it by selectively enabling it in prod-like environments only, and where the risk profile is applicable.
* Automated testing, operational and security reviews are essential to stability in either approach.
+**Example**
+
+Consider a simplified REST API, structured as micro functions, with two routes:
+
+* `/users` - an endpoint that returns all users of the application on `GET` requests
+* `/users/` - an endpoint that looks up a single user's details by ID on `GET` requests
+
+Each endpoint will be its own Lambda function that is configured as a [Lambda integration](https://docs.aws.amazon.com/apigateway/latest/developerguide/getting-started-with-lambda-integration.html){target="_blank"}. This allows you to set different configurations for each function (memory size, layers, etc.).
+
+=== "`/users` Endpoint"
+ ```python
+ --8<-- "examples/event_handler_rest/src/micro_function_all_users_route.py"
+ ```
+
+=== "`/users/` Endpoint"
+ ```python
+ --8<-- "examples/event_handler_rest/src/micro_function_user_by_id_route.py"
+ ```
+
+=== "Micro Function Example SAM Template"
+ ```yaml
+ --8<-- "examples/event_handler_rest/sam/micro_function_template.yaml"
+ ```
+
+
+???+ note
+ You can see some of the downsides in this example, such as duplicated code across functions. With proper build tooling, the `User` class could be shared across functions, either by packaging shared code as a [Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/chapter-layers.html){target="_blank"} or by using a build tool such as [Pants](https://www.pantsbuild.org/docs/awslambda-python){target="_blank" rel="nofollow"}.
+
+
## Testing your code
You can test your routes by passing a proxy event request with required params.
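The `--8<--` includes above reference example files that are not part of this diff. As an orientation aid, here is an illustrative sketch of what the `/users` micro function handler could look like; the `User` model, the static data, and the handler layout are assumptions rather than the repository's actual example.

```python
# Hypothetical `/users` micro function; model and data are illustrative only
from dataclasses import asdict, dataclass
from typing import Dict, List

from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.utilities.typing import LambdaContext

app = APIGatewayRestResolver()


@dataclass
class User:
    id: str
    name: str


@app.get("/users")
def all_users() -> List[Dict[str, str]]:
    # A real function would query a data store; static data keeps the sketch self-contained
    users = [User(id="1", name="Ada"), User(id="2", name="Grace")]
    return [asdict(user) for user in users]


def lambda_handler(event: dict, context: LambdaContext):
    return app.resolve(event, context)
```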
diff --git a/docs/index.md b/docs/index.md
index 151915c2519..59e460ff976 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -26,8 +26,8 @@ Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverles
You can install Powertools for AWS Lambda (Python) using one of the following options:
-* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
-* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
+* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
+* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
* **Pip**: **[`pip install "aws-lambda-powertools"`](#){: .copyMe}:clipboard:**
!!! question "Looking for Pip signed releases? [Learn more about verifying signed builds](./security.md#verifying-signed-builds)"
@@ -80,61 +80,61 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
| Region | Layer ARN |
| ---------------- | ---------------------------------------------------------------------------------------------------------- |
- | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:2](#){: .copyMe}:clipboard: |
- | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
- | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:42](#){: .copyMe}:clipboard: |
+ | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:3](#){: .copyMe}:clipboard: |
+ | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
+ | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:43](#){: .copyMe}:clipboard: |
=== "arm64"
| Region | Layer ARN |
| ---------------- | ---------------------------------------------------------------------------------------------------------------- |
- | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
- | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42](#){: .copyMe}:clipboard: |
+ | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
+ | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43](#){: .copyMe}:clipboard: |
??? note "Note: Click to expand and copy code snippets for popular frameworks"
@@ -147,7 +147,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
Type: AWS::Serverless::Function
Properties:
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43
```
=== "Serverless framework"
@@ -157,7 +157,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
hello:
handler: lambda_function.lambda_handler
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43
```
=== "CDK"
@@ -173,7 +173,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -222,7 +222,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
}
@@ -275,7 +275,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43
❯ amplify push -y
@@ -286,7 +286,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:42
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:43
? Do you want to edit the local lambda function now? No
```
@@ -300,7 +300,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
Properties:
Architectures: [arm64]
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43
```
=== "Serverless framework"
@@ -311,7 +311,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
handler: lambda_function.lambda_handler
architecture: arm64
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43
```
=== "CDK"
@@ -327,7 +327,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -377,7 +377,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43"]
architectures = ["arm64"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
@@ -433,7 +433,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43
❯ amplify push -y
@@ -444,7 +444,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:42
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:43
? Do you want to edit the local lambda function now? No
```
@@ -452,7 +452,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
Change {region} to your AWS region, e.g. `eu-west-1`
```bash title="AWS CLI"
- aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42 --region {region}
+ aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43 --region {region}
```
The pre-signed URL to download this Lambda Layer will be within `Location` key.
diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md
index 7b3aa74e275..fd4a176f631 100644
--- a/docs/utilities/data_classes.md
+++ b/docs/utilities/data_classes.md
@@ -197,7 +197,7 @@ Use **`APIGatewayAuthorizerRequestEvent`** for type `REQUEST` and **`APIGatewayA
if user.get("isAdmin", False):
policy.allow_all_routes()
else:
- policy.allow_route(HttpVerb.GET, "/user-profile")
+ policy.allow_route(HttpVerb.GET.value, "/user-profile")
return policy.asdict()
```
@@ -975,18 +975,39 @@ or plain text, depending on the original payload.
### Kinesis Firehose delivery stream
-Kinesis Firehose Data Transformation can use a Lambda Function to modify the records
-inline, and re-emit them back to the Delivery Stream.
+When using Kinesis Firehose, you can use a Lambda function to [perform data transformation](https://docs.aws.amazon.com/firehose/latest/dev/data-transformation.html){target="_blank"}. For each transformed record, you can choose to:
-Similar to Kinesis Data Streams, the events contain base64 encoded data. You can use the helper
-function to access the data either as json or plain text, depending on the original payload.
+* **A)** Put them back to the delivery stream (default)
+* **B)** Drop them so consumers don't receive them (e.g., data validation)
+* **C)** Indicate a record failed data transformation and should be retried
-=== "app.py"
+To do that, you can use the `KinesisFirehoseDataTransformationResponse` class along with helper functions that make it easier to decode and encode base64 data in the stream.
- ```python
+=== "Transforming streaming records"
+
+ ```python hl_lines="2-3 12 28"
--8<-- "examples/event_sources/src/kinesis_firehose_delivery_stream.py"
```
+    1. **Ingesting JSON payloads?** Use `record.data_as_json` to easily deserialize them.
+    2. For your convenience, `base64_from_json` serializes a dict to JSON, then encodes it as base64 data.
+
+=== "Dropping invalid records"
+
+ ```python hl_lines="5-6 16 34"
+ --8<-- "examples/event_sources/src/kinesis_firehose_response_drop.py"
+ ```
+
+    1. This exception would be raised by `record.data_as_json` if the payload is not valid JSON.
+
+=== "Indicating a processing failure"
+
+ ```python hl_lines="2-3 33"
+ --8<-- "examples/event_sources/src/kinesis_firehose_response_exception.py"
+ ```
+
+ 1. This record will now be sent to your [S3 bucket in the `processing-failed` folder](https://docs.aws.amazon.com/firehose/latest/dev/data-transformation.html#data-transformation-failure-handling){target="_blank"}.
+
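+If you want to see the serialized payload without running a full stream, here is a minimal, illustrative sketch (not one of the bundled examples; the record id and payload are made up) that builds a response with a single record:
+
+```python
+from aws_lambda_powertools.utilities.data_classes import (
+    KinesisFirehoseDataTransformationRecord,
+    KinesisFirehoseDataTransformationResponse,
+)
+from aws_lambda_powertools.utilities.serialization import base64_from_json
+
+response = KinesisFirehoseDataTransformationResponse()
+response.add_record(
+    KinesisFirehoseDataTransformationRecord(
+        record_id="record-1",  # hypothetical record id, for illustration only
+        data=base64_from_json({"hello": "world"}),  # Firehose expects base64-encoded data
+    ),
+)
+
+# dict in the shape Kinesis Firehose expects back from the Lambda handler
+response.asdict()
+```
+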
### Lambda Function URL
=== "app.py"
@@ -1095,6 +1116,22 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda
file_key = event.detail.object.key
```
+### Secrets Manager
+
+AWS Secrets Manager rotation uses an AWS Lambda function to update the secret. [Click here](https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotating-secrets.html){target="_blank"} for more information about rotating AWS Secrets Manager secrets.
+
+=== "app.py"
+
+ ```python hl_lines="2 7 11"
+ --8<-- "examples/event_sources/src/secrets_manager.py"
+ ```
+
+=== "Secrets Manager Example Event"
+
+ ```json
+ --8<-- "tests/events/secretsManagerEvent.json"
+ ```
+
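+Secrets Manager invokes the rotation function once per rotation step. As an illustrative sketch only (assuming the standard `createSecret`/`setSecret`/`testSecret`/`finishSecret` steps; this is not part of the bundled example), you could branch on `event.step`:
+
+```python
+from aws_lambda_powertools.utilities.data_classes import SecretsManagerEvent, event_source
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+@event_source(data_class=SecretsManagerEvent)
+def lambda_handler(event: SecretsManagerEvent, context: LambdaContext):
+    if event.step == "createSecret":
+        ...  # stage a new secret version under event.version_id
+    elif event.step == "setSecret":
+        ...  # apply the staged secret to the downstream service
+    elif event.step == "testSecret":
+        ...  # verify the staged secret actually works
+    elif event.step == "finishSecret":
+        ...  # promote the staged version to AWSCURRENT
+```
+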
### SES
=== "app.py"
diff --git a/docs/we_made_this.md b/docs/we_made_this.md
index a1229ec99aa..c3abaaa1dfa 100644
--- a/docs/we_made_this.md
+++ b/docs/we_made_this.md
@@ -106,7 +106,7 @@ When building applications with AWS Lambda it is critical to verify the data str
In this session you will learn how to increase code quality, extensibility and testability, boost you productivity and ship rock solid apps to production.
-
+
#### Talk DEV to me | Feature Flags with AWS Lambda Powertools
@@ -114,7 +114,7 @@ In this session you will learn how to increase code quality, extensibility and t
A deep dive in the [Feature Flags](./utilities/feature_flags.md){target="_blank" rel="nofollow"} feature along with tips and tricks.
-
+
#### Level Up Your CI/CD With Smart AWS Feature Flags
diff --git a/examples/event_handler_rest/sam/micro_function_template.yaml b/examples/event_handler_rest/sam/micro_function_template.yaml
new file mode 100644
index 00000000000..fb27206fddf
--- /dev/null
+++ b/examples/event_handler_rest/sam/micro_function_template.yaml
@@ -0,0 +1,63 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: >
+ micro-function-example
+
+Globals:
+ Api:
+ TracingEnabled: true
+ Cors: # see CORS section
+ AllowOrigin: "'https://example.com'"
+ AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'"
+ MaxAge: "'300'"
+ BinaryMediaTypes: # see Binary responses section
+ - "*~1*" # converts to */* for any binary type
+
+ Function:
+ Timeout: 5
+ Runtime: python3.11
+
+Resources:
+ # Lambda Function Solely For /users endpoint
+ AllUsersFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: app.lambda_handler
+ CodeUri: users
+ Description: Function for /users endpoint
+ Architectures:
+ - x86_64
+ Tracing: Active
+ Events:
+ UsersPath:
+ Type: Api
+ Properties:
+ Path: /users
+ Method: GET
+      MemorySize: 128 # Each Lambda Function can have its own memory configuration
+ Environment:
+ Variables:
+ LOG_LEVEL: INFO
+ Tags:
+ LambdaPowertools: python
+
+ # Lambda Function Solely For /users/{id} endpoint
+ UserByIdFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ Handler: app.lambda_handler
+ CodeUri: users_by_id
+ Description: Function for /users/{id} endpoint
+ Architectures:
+ - x86_64
+ Tracing: Active
+ Events:
+ UsersByIdPath:
+ Type: Api
+ Properties:
+ Path: /users/{id+}
+ Method: GET
+      MemorySize: 128 # Each Lambda Function can have its own memory configuration
+ Environment:
+ Variables:
+ LOG_LEVEL: INFO
diff --git a/examples/event_handler_rest/src/micro_function_all_users_route.py b/examples/event_handler_rest/src/micro_function_all_users_route.py
new file mode 100644
index 00000000000..1a809634b45
--- /dev/null
+++ b/examples/event_handler_rest/src/micro_function_all_users_route.py
@@ -0,0 +1,56 @@
+import json
+from dataclasses import dataclass
+from http import HTTPStatus
+
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = Logger()
+
+# This would likely be a db lookup
+users = [
+ {
+ "user_id": "b0b2a5bf-ee1e-4c5e-9a86-91074052739e",
+ "email": "john.doe@example.com",
+ "active": True,
+ },
+ {
+ "user_id": "3a9df6b1-938c-4e80-bd4a-0c966f4b1c1e",
+ "email": "jane.smith@example.com",
+ "active": False,
+ },
+ {
+ "user_id": "aa0d3d09-9cb9-42b9-9e63-1fb17ea52981",
+ "email": "alex.wilson@example.com",
+ "active": True,
+ },
+]
+
+
+@dataclass
+class User:
+ user_id: str
+ email: str
+ active: bool
+
+
+app = APIGatewayRestResolver()
+
+
+@app.get("/users")
+def all_active_users():
+ """HTTP Response for all active users"""
+ all_users = [User(**user) for user in users]
+ all_active_users = [user.__dict__ for user in all_users if user.active]
+
+ return Response(
+ status_code=HTTPStatus.OK.value,
+ content_type="application/json",
+ body=json.dumps(all_active_users),
+ )
+
+
+@logger.inject_lambda_context()
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/micro_function_user_by_id_route.py b/examples/event_handler_rest/src/micro_function_user_by_id_route.py
new file mode 100644
index 00000000000..f47464732a1
--- /dev/null
+++ b/examples/event_handler_rest/src/micro_function_user_by_id_route.py
@@ -0,0 +1,72 @@
+import json
+from dataclasses import dataclass
+from http import HTTPStatus
+from typing import Union
+
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = Logger()
+
+# This would likely be a db lookup
+users = [
+ {
+ "user_id": "b0b2a5bf-ee1e-4c5e-9a86-91074052739e",
+ "email": "john.doe@example.com",
+ "active": True,
+ },
+ {
+ "user_id": "3a9df6b1-938c-4e80-bd4a-0c966f4b1c1e",
+ "email": "jane.smith@example.com",
+ "active": False,
+ },
+ {
+ "user_id": "aa0d3d09-9cb9-42b9-9e63-1fb17ea52981",
+ "email": "alex.wilson@example.com",
+ "active": True,
+ },
+]
+
+
+@dataclass
+class User:
+ user_id: str
+ email: str
+ active: bool
+
+
+def get_user_by_id(user_id: str) -> Union[User, None]:
+ for user_data in users:
+ if user_data["user_id"] == user_id:
+ return User(
+ user_id=str(user_data["user_id"]),
+ email=str(user_data["email"]),
+ active=bool(user_data["active"]),
+ )
+
+ return None
+
+
+app = APIGatewayRestResolver()
+
+
+@app.get("/users/<user_id>")
+def user_by_id(user_id: str):
+    """HTTP Response for a single user looked up by ID"""
+ user = get_user_by_id(user_id)
+
+ if user:
+ return Response(
+ status_code=HTTPStatus.OK.value,
+ content_type="application/json",
+ body=json.dumps(user.__dict__),
+ )
+
+ else:
+ return Response(status_code=HTTPStatus.NOT_FOUND)
+
+
+@logger.inject_lambda_context()
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ return app.resolve(event, context)
diff --git a/examples/event_sources/src/kinesis_firehose_delivery_stream.py b/examples/event_sources/src/kinesis_firehose_delivery_stream.py
index 770bfb1ee63..3dc6fbda703 100644
--- a/examples/event_sources/src/kinesis_firehose_delivery_stream.py
+++ b/examples/event_sources/src/kinesis_firehose_delivery_stream.py
@@ -1,28 +1,28 @@
-import base64
-import json
-
from aws_lambda_powertools.utilities.data_classes import (
+ KinesisFirehoseDataTransformationResponse,
KinesisFirehoseEvent,
event_source,
)
+from aws_lambda_powertools.utilities.serialization import base64_from_json
from aws_lambda_powertools.utilities.typing import LambdaContext
@event_source(data_class=KinesisFirehoseEvent)
def lambda_handler(event: KinesisFirehoseEvent, context: LambdaContext):
- result = []
+ result = KinesisFirehoseDataTransformationResponse()
for record in event.records:
- # if data was delivered as json; caches loaded value
- data = record.data_as_json
+ # get original data using data_as_text property
+ data = record.data_as_text # (1)!
+
+        ## generate data to return
+        transformed_data = {"new_data": "transformed data using Powertools", "original_payload": data}
-        processed_record = {
-            "recordId": record.record_id,
-            "data": base64.b64encode(json.dumps(data).encode("utf-8")),
-            "result": "Ok",
-        }
+        processed_record = record.build_data_transformation_response(
+            data=base64_from_json(transformed_data),  # (2)!
+        )
-        result.append(processed_record)
+        result.add_record(processed_record)
# return transformed records
- return {"records": result}
+ return result.asdict()
diff --git a/examples/event_sources/src/kinesis_firehose_response_drop.py b/examples/event_sources/src/kinesis_firehose_response_drop.py
new file mode 100644
index 00000000000..8b565480a34
--- /dev/null
+++ b/examples/event_sources/src/kinesis_firehose_response_drop.py
@@ -0,0 +1,40 @@
+from json import JSONDecodeError
+from typing import Dict
+
+from aws_lambda_powertools.utilities.data_classes import (
+ KinesisFirehoseDataTransformationRecord,
+ KinesisFirehoseDataTransformationResponse,
+ KinesisFirehoseEvent,
+ event_source,
+)
+from aws_lambda_powertools.utilities.serialization import base64_from_json
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+@event_source(data_class=KinesisFirehoseEvent)
+def lambda_handler(event: KinesisFirehoseEvent, context: LambdaContext):
+ result = KinesisFirehoseDataTransformationResponse()
+
+ for record in event.records:
+ try:
+            payload: Dict = record.data_as_json  # decodes and deserializes the base64-encoded JSON string
+
+ ## generate data to return
+ transformed_data = {"tool_used": "powertools_dataclass", "original_payload": payload}
+
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id=record.record_id,
+ data=base64_from_json(transformed_data),
+ )
+ except JSONDecodeError: # (1)!
+ # our producers ingest JSON payloads only; drop malformed records from the stream
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id=record.record_id,
+ data=record.data,
+ result="Dropped",
+ )
+
+ result.add_record(processed_record)
+
+ # return transformed records
+ return result.asdict()
diff --git a/examples/event_sources/src/kinesis_firehose_response_exception.py b/examples/event_sources/src/kinesis_firehose_response_exception.py
new file mode 100644
index 00000000000..43ba3a039b2
--- /dev/null
+++ b/examples/event_sources/src/kinesis_firehose_response_exception.py
@@ -0,0 +1,39 @@
+from aws_lambda_powertools.utilities.data_classes import (
+ KinesisFirehoseDataTransformationRecord,
+ KinesisFirehoseDataTransformationResponse,
+ KinesisFirehoseEvent,
+ event_source,
+)
+from aws_lambda_powertools.utilities.serialization import base64_from_json
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+@event_source(data_class=KinesisFirehoseEvent)
+def lambda_handler(event: dict, context: LambdaContext):
+ firehose_event = KinesisFirehoseEvent(event)
+ result = KinesisFirehoseDataTransformationResponse()
+
+ for record in firehose_event.records:
+ try:
+ payload = record.data_as_text # base64 decoded data as str
+
+ # generate data to return
+ transformed_data = {"tool_used": "powertools_dataclass", "original_payload": payload}
+
+ # Default result is Ok
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id=record.record_id,
+ data=base64_from_json(transformed_data),
+ )
+ except Exception:
+            # mark the record as ProcessingFailed so Firehose routes it to the failure handling destination
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id=record.record_id,
+ data=record.data,
+ result="ProcessingFailed", # (1)!
+ )
+
+ result.add_record(processed_record)
+
+ # return transformed records
+ return result.asdict()
diff --git a/examples/event_sources/src/secrets_manager.py b/examples/event_sources/src/secrets_manager.py
new file mode 100644
index 00000000000..d69b052779e
--- /dev/null
+++ b/examples/event_sources/src/secrets_manager.py
@@ -0,0 +1,16 @@
+from aws_lambda_powertools.utilities import parameters
+from aws_lambda_powertools.utilities.data_classes import SecretsManagerEvent, event_source
+
+secrets_provider = parameters.SecretsProvider()
+
+
+@event_source(data_class=SecretsManagerEvent)
+def lambda_handler(event: SecretsManagerEvent, context):
+    # Get the secret value using the Parameters utility
+    # See https://docs.powertools.aws.dev/lambda/python/latest/utilities/parameters/
+    secret = secrets_provider.get(event.secret_id, VersionId=event.version_id, VersionStage="AWSCURRENT")
+
+    # You can then work with the secret value as needed
+ # Check more examples: https://github.com/aws-samples/aws-secrets-manager-rotation-lambdas
+
+ return secret
diff --git a/examples/event_sources/src/secrets_manager_event.json b/examples/event_sources/src/secrets_manager_event.json
new file mode 100644
index 00000000000..18e7dcd935b
--- /dev/null
+++ b/examples/event_sources/src/secrets_manager_event.json
@@ -0,0 +1,5 @@
+{
+ "SecretId":"arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3",
+ "ClientRequestToken":"550e8400-e29b-41d4-a716-446655440000",
+ "Step":"createSecret"
+}
diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml
index 4f4d056188b..b11310ab8a2 100644
--- a/examples/logger/sam/template.yaml
+++ b/examples/logger/sam/template.yaml
@@ -14,7 +14,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43
Resources:
LoggerLambdaHandlerExample:
diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml
index 6ad63db6a81..b706ea33e15 100644
--- a/examples/metrics/sam/template.yaml
+++ b/examples/metrics/sam/template.yaml
@@ -15,7 +15,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43
Resources:
CaptureLambdaHandlerExample:
diff --git a/examples/parameters/src/custom_provider_vault.py b/examples/parameters/src/custom_provider_vault.py
index 06d0a929fff..03bab592010 100644
--- a/examples/parameters/src/custom_provider_vault.py
+++ b/examples/parameters/src/custom_provider_vault.py
@@ -1,5 +1,4 @@
-import json
-from typing import Dict
+from typing import Any, Dict
from hvac import Client
@@ -8,21 +7,18 @@
class VaultProvider(BaseProvider):
def __init__(self, vault_url: str, vault_token: str) -> None:
-
super().__init__()
self.vault_client = Client(url=vault_url, verify=False, timeout=10)
self.vault_client.token = vault_token
- def _get(self, name: str, **sdk_options) -> str:
-
+ def _get(self, name: str, **sdk_options) -> Dict[str, Any]:
# for example proposal, the mountpoint is always /secret
kv_configuration = self.vault_client.secrets.kv.v2.read_secret(path=name)
- return json.dumps(kv_configuration["data"]["data"])
+ return kv_configuration["data"]["data"]
def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]:
-
list_secrets = {}
all_secrets = self.vault_client.secrets.kv.v2.list_secrets(path=path)
diff --git a/examples/parameters/src/working_with_own_provider_vault.py b/examples/parameters/src/working_with_own_provider_vault.py
index 7be9ea60242..71ad43495d2 100644
--- a/examples/parameters/src/working_with_own_provider_vault.py
+++ b/examples/parameters/src/working_with_own_provider_vault.py
@@ -14,10 +14,9 @@
def lambda_handler(event: dict, context: LambdaContext):
-
try:
# Retrieve a single parameter
- endpoint_comments: Any = vault_provider.get("comments_endpoint", transform="json")
+ endpoint_comments: Any = vault_provider.get("comments_endpoint")
# you can get all parameters using get_multiple and specifying vault mount point
# # for testing purposes we will not use it
diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml
index abf4f7f4865..2485e729222 100644
--- a/examples/tracer/sam/template.yaml
+++ b/examples/tracer/sam/template.yaml
@@ -13,7 +13,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:42
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:43
Resources:
CaptureLambdaHandlerExample:
diff --git a/package-lock.json b/package-lock.json
index b35c19c6d09..09582bb03ca 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -11,13 +11,13 @@
"package-lock.json": "^1.0.0"
},
"devDependencies": {
- "aws-cdk": "^2.94.0"
+ "aws-cdk": "^2.96.1"
}
},
"node_modules/aws-cdk": {
- "version": "2.94.0",
- "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.94.0.tgz",
- "integrity": "sha512-9bJkzxFDYZDwPDfZi/DSUODn4HFRzuXWPhpFgIIgRykfT18P+iAIJ1AEhaaCmlqrrog5yQgN+2iYd9BwDsiBeg==",
+ "version": "2.96.1",
+ "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.96.1.tgz",
+ "integrity": "sha512-dCMriGZj2w6/B5+bu45knQM9QmPpDoUMiGCzsxALsOJVu/Fr5QwvmHxRBTc48uaVAOlYN2qQsAcG5H6TXtBJhg==",
"dev": true,
"bin": {
"cdk": "bin/cdk"
@@ -51,9 +51,9 @@
},
"dependencies": {
"aws-cdk": {
- "version": "2.94.0",
- "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.94.0.tgz",
- "integrity": "sha512-9bJkzxFDYZDwPDfZi/DSUODn4HFRzuXWPhpFgIIgRykfT18P+iAIJ1AEhaaCmlqrrog5yQgN+2iYd9BwDsiBeg==",
+ "version": "2.96.1",
+ "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.96.1.tgz",
+ "integrity": "sha512-dCMriGZj2w6/B5+bu45knQM9QmPpDoUMiGCzsxALsOJVu/Fr5QwvmHxRBTc48uaVAOlYN2qQsAcG5H6TXtBJhg==",
"dev": true,
"requires": {
"fsevents": "2.3.2"
diff --git a/package.json b/package.json
index 55eb952cf6e..6027410859b 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
"name": "aws-lambda-powertools-python-e2e",
"version": "1.0.0",
"devDependencies": {
- "aws-cdk": "^2.94.0"
+ "aws-cdk": "^2.96.1"
},
"dependencies": {
"package-lock.json": "^1.0.0"
diff --git a/poetry.lock b/poetry.lock
index 3bf9a3f1127..af79aeccb90 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -404,13 +404,13 @@ files = [
[[package]]
name = "cfn-lint"
-version = "0.79.9"
+version = "0.79.11"
description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved"
optional = false
python-versions = ">=3.7, <=4.0, !=4.0"
files = [
- {file = "cfn-lint-0.79.9.tar.gz", hash = "sha256:fb8a5fc674ce39469a66d37de19130f4b31fbe4685a19b65ec51c8c8f35e8990"},
- {file = "cfn_lint-0.79.9-py3-none-any.whl", hash = "sha256:e151194ca5b4994d68593f4b0fd3cda3bb28edea2023528ca34ecf82b914eec1"},
+ {file = "cfn-lint-0.79.11.tar.gz", hash = "sha256:aef38604fce280aacc1bb02f673ebab101503a7660a0d18e08e1df58f3bca885"},
+ {file = "cfn_lint-0.79.11-py3-none-any.whl", hash = "sha256:f97f7a6ba9ed3b4715ea4064b437f522235be087a6661d813dbd089757c3a714"},
]
[package.dependencies]
@@ -421,7 +421,7 @@ jsonschema = ">=3.0,<4.18"
junit-xml = ">=1.9,<2.0"
networkx = ">=2.4,<4"
pyyaml = ">5.4"
-regex = "*"
+regex = ">=2021.7.1"
sarif-om = ">=1.0.4,<1.1.0"
sympy = ">=1.0.0"
@@ -990,13 +990,13 @@ socks = ["socksio (==1.*)"]
[[package]]
name = "hvac"
-version = "1.2.0"
+version = "1.2.1"
description = "HashiCorp Vault API client"
optional = false
python-versions = ">=3.6.2,<4.0.0"
files = [
- {file = "hvac-1.2.0-py3-none-any.whl", hash = "sha256:95716e0a6c081214d5f6dc74548d6e388aca895cd7be152cfaf177f7520b3d6e"},
- {file = "hvac-1.2.0.tar.gz", hash = "sha256:6f5aa0d6b8138b585d4656d1fe01b5d87616310c80484b909cc84c2cb8f064fd"},
+ {file = "hvac-1.2.1-py3-none-any.whl", hash = "sha256:cb87f5724be8fd5f57507f5d5a94e6c42d2675128b460bf3186f966e07d4db78"},
+ {file = "hvac-1.2.1.tar.gz", hash = "sha256:c786e3dfa1f35239810e5317cccadbe358f49b8c9001a1f2f68b79a250b9f8a1"},
]
[package.dependencies]
@@ -1792,13 +1792,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-xray"
-version = "1.28.36"
-description = "Type annotations for boto3.XRay 1.28.36 service generated with mypy-boto3-builder 7.18.0"
+version = "1.28.47"
+description = "Type annotations for boto3.XRay 1.28.47 service generated with mypy-boto3-builder 7.19.0"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-boto3-xray-1.28.36.tar.gz", hash = "sha256:fc7dfbd85d78c14bc45a823165c61dd084a36d7700b4935f88ff3a7b8e8dac48"},
- {file = "mypy_boto3_xray-1.28.36-py3-none-any.whl", hash = "sha256:57a4a32fcc0368e5ec6c58d67f7abdc7332bedb7236ef072c157ae21fb44a332"},
+ {file = "mypy-boto3-xray-1.28.47.tar.gz", hash = "sha256:d4e893a5b6819b668096ce00ed969904d00cf034d1b9073034a243ab794e28c6"},
+ {file = "mypy_boto3_xray-1.28.47-py3-none-any.whl", hash = "sha256:3af3f00c48fd379fe264ab4050f5fcb1dba8a404a8cd8b0b08aa3ec70860b2d0"},
]
[package.dependencies]
@@ -2512,28 +2512,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "ruff"
-version = "0.0.287"
+version = "0.0.289"
description = "An extremely fast Python linter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.0.287-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:1e0f9ee4c3191444eefeda97d7084721d9b8e29017f67997a20c153457f2eafd"},
- {file = "ruff-0.0.287-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e9843e5704d4fb44e1a8161b0d31c1a38819723f0942639dfeb53d553be9bfb5"},
- {file = "ruff-0.0.287-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca1ed11d759a29695aed2bfc7f914b39bcadfe2ef08d98ff69c873f639ad3a8"},
- {file = "ruff-0.0.287-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf4d5ad3073af10f186ea22ce24bc5a8afa46151f6896f35c586e40148ba20b"},
- {file = "ruff-0.0.287-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d9d58bcb29afd72d2afe67120afcc7d240efc69a235853813ad556443dc922"},
- {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:06ac5df7dd3ba8bf83bba1490a72f97f1b9b21c7cbcba8406a09de1a83f36083"},
- {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bfb478e1146a60aa740ab9ebe448b1f9e3c0dfb54be3cc58713310eef059c30"},
- {file = "ruff-0.0.287-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00d579a011949108c4b4fa04c4f1ee066dab536a9ba94114e8e580c96be2aeb4"},
- {file = "ruff-0.0.287-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a810a79b8029cc92d06c36ea1f10be5298d2323d9024e1d21aedbf0a1a13e5"},
- {file = "ruff-0.0.287-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:150007028ad4976ce9a7704f635ead6d0e767f73354ce0137e3e44f3a6c0963b"},
- {file = "ruff-0.0.287-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a24a280db71b0fa2e0de0312b4aecb8e6d08081d1b0b3c641846a9af8e35b4a7"},
- {file = "ruff-0.0.287-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2918cb7885fa1611d542de1530bea3fbd63762da793751cc8c8d6e4ba234c3d8"},
- {file = "ruff-0.0.287-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:33d7b251afb60bec02a64572b0fd56594b1923ee77585bee1e7e1daf675e7ae7"},
- {file = "ruff-0.0.287-py3-none-win32.whl", hash = "sha256:022f8bed2dcb5e5429339b7c326155e968a06c42825912481e10be15dafb424b"},
- {file = "ruff-0.0.287-py3-none-win_amd64.whl", hash = "sha256:26bd0041d135a883bd6ab3e0b29c42470781fb504cf514e4c17e970e33411d90"},
- {file = "ruff-0.0.287-py3-none-win_arm64.whl", hash = "sha256:44bceb3310ac04f0e59d4851e6227f7b1404f753997c7859192e41dbee9f5c8d"},
- {file = "ruff-0.0.287.tar.gz", hash = "sha256:02dc4f5bf53ef136e459d467f3ce3e04844d509bc46c025a05b018feb37bbc39"},
+ {file = "ruff-0.0.289-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:c9a89d748e90c840bac9c37afe90cf13a5bfd460ca02ea93dad9d7bee3af03b4"},
+ {file = "ruff-0.0.289-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7f7396c6ea01ba332a6ad9d47642bac25d16bd2076aaa595b001f58b2f32ff05"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7180de86c8ecd39624dec1699136f941c07e723201b4ce979bec9e7c67b40ad2"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73f37c65508203dd01a539926375a10243769c20d4fcab3fa6359cd3fbfc54b7"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c14abcd7563b5c80be2dd809eeab20e4aa716bf849860b60a22d87ddf19eb88"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:91b6d63b6b46d4707916472c91baa87aa0592e73f62a80ff55efdf6c0668cfd6"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6479b8c4be3c36046c6c92054762b276fa0fddb03f6b9a310fbbf4c4951267fd"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5424318c254bcb091cb67e140ec9b9f7122074e100b06236f252923fb41e767"},
+ {file = "ruff-0.0.289-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4daa90865796aedcedf0d8897fdd4cd09bf0ddd3504529a4ccf211edcaff3c7d"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8057e8ab0016c13b9419bad119e854f881e687bd96bc5e2d52c8baac0f278a44"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7eebfab2e6a6991908ff1bf82f2dc1e5095fc7e316848e62124526837b445f4d"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ebc7af550018001a7fb39ca22cdce20e1a0de4388ea4a007eb5c822f6188c297"},
+ {file = "ruff-0.0.289-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6e4e6eccb753efe760ba354fc8e9f783f6bba71aa9f592756f5bd0d78db898ed"},
+ {file = "ruff-0.0.289-py3-none-win32.whl", hash = "sha256:bbb3044f931c09cf17dbe5b339896eece0d6ac10c9a86e172540fcdb1974f2b7"},
+ {file = "ruff-0.0.289-py3-none-win_amd64.whl", hash = "sha256:6d043c5456b792be2615a52f16056c3cf6c40506ce1f2d6f9d3083cfcb9eeab6"},
+ {file = "ruff-0.0.289-py3-none-win_arm64.whl", hash = "sha256:04a720bcca5e987426bb14ad8b9c6f55e259ea774da1cbeafe71569744cfd20a"},
+ {file = "ruff-0.0.289.tar.gz", hash = "sha256:2513f853b0fc42f0339b7ab0d2751b63ce7a50a0032d2689b54b2931b3b866d7"},
]
[[package]]
@@ -2570,13 +2570,13 @@ pbr = "*"
[[package]]
name = "sentry-sdk"
-version = "1.30.0"
+version = "1.31.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = "*"
files = [
- {file = "sentry-sdk-1.30.0.tar.gz", hash = "sha256:7dc873b87e1faf4d00614afd1058bfa1522942f33daef8a59f90de8ed75cd10c"},
- {file = "sentry_sdk-1.30.0-py2.py3-none-any.whl", hash = "sha256:2e53ad63f96bb9da6570ba2e755c267e529edcf58580a2c0d2a11ef26e1e678b"},
+ {file = "sentry-sdk-1.31.0.tar.gz", hash = "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b"},
+ {file = "sentry_sdk-1.31.0-py2.py3-none-any.whl", hash = "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291"},
]
[package.dependencies]
@@ -2586,10 +2586,12 @@ urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""}
[package.extras]
aiohttp = ["aiohttp (>=3.5)"]
arq = ["arq (>=0.23)"]
+asyncpg = ["asyncpg (>=0.23)"]
beam = ["apache-beam (>=2.12)"]
bottle = ["bottle (>=0.12.13)"]
celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
+clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
@@ -3002,4 +3004,4 @@ validation = ["fastjsonschema"]
[metadata]
lock-version = "2.0"
python-versions = "^3.7.4"
-content-hash = "5d14edcdc4a612dae00c767a4492e297ebf1a0f9e34d48792653bbefed72e09e"
+content-hash = "47678477156d6633904ebc1a5b35ea018da5f6095d9a7162fc552cda8130f1c3"
diff --git a/pyproject.toml b/pyproject.toml
index 04642131f3c..dd0c208f49a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "aws_lambda_powertools"
-version = "2.23.1"
+version = "2.25.0"
description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity."
authors = ["Amazon Web Services"]
include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]
@@ -66,7 +66,7 @@ mypy-boto3-logs = "^1.28.36"
mypy-boto3-secretsmanager = "^1.28.36"
mypy-boto3-ssm = "^1.28.36"
mypy-boto3-s3 = "^1.28.36"
-mypy-boto3-xray = "^1.28.36"
+mypy-boto3-xray = "^1.28.47"
types-requests = "^2.31.0"
typing-extensions = "^4.6.2"
mkdocs-material = "^9.2.7"
@@ -75,7 +75,7 @@ checksumdir = "^1.2.0"
mypy-boto3-appconfigdata = "^1.28.36"
ijson = "^3.2.2"
typed-ast = { version = "^1.5.5", python = "< 3.8"}
-hvac = "^1.2.0"
+hvac = "^1.2.1"
aws-requests-auth = "^0.4.3"
datadog-lambda = "^4.77.0"
@@ -89,12 +89,12 @@ aws-sdk = ["boto3"]
datadog=["datadog-lambda"]
[tool.poetry.group.dev.dependencies]
-cfn-lint = "0.79.9"
+cfn-lint = "0.79.11"
mypy = "^1.1.1"
types-python-dateutil = "^2.8.19.6"
httpx = ">=0.23.3,<0.25.0"
sentry-sdk = "^1.22.2"
-ruff = ">=0.0.272,<0.0.288"
+ruff = ">=0.0.272,<0.0.290"
retry2 = "^0.9.5"
[tool.coverage.run]
diff --git a/ruff.toml b/ruff.toml
index 83910e44ebf..a0f8e4fe74f 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,46 +1,59 @@
# Enable rules.
select = [
- "A", # flake8-builtins - https://beta.ruff.rs/docs/rules/#flake8-builtins-a
- "B", # flake8-bugbear-b - https://beta.ruff.rs/docs/rules/#flake8-bugbear-b
- "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4
+ "A", # flake8-builtins - https://beta.ruff.rs/docs/rules/#flake8-builtins-a
+ "B", # flake8-bugbear-b - https://beta.ruff.rs/docs/rules/#flake8-bugbear-b
+ "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4
"C90", # mccabe - https://beta.ruff.rs/docs/rules/#mccabe-c90
"COM", # flak8-commas - https://beta.ruff.rs/docs/rules/#flake8-commas-com
#"D", # pydocstyle - not enabled temporarily
- "E", # pycodestyle error - https://beta.ruff.rs/docs/rules/#error-e
+ "E", # pycodestyle error - https://beta.ruff.rs/docs/rules/#error-e
"ERA", # flake8-eradicate - https://beta.ruff.rs/docs/rules/#eradicate-era
- "FA", # flake8-future-annotations - https://beta.ruff.rs/docs/rules/#flake8-future-annotations-fa
+ "FA", # flake8-future-annotations - https://beta.ruff.rs/docs/rules/#flake8-future-annotations-fa
"FIX", # flake8-fixme - https://beta.ruff.rs/docs/rules/#flake8-fixme-fix
- "F", # pyflakes - https://beta.ruff.rs/docs/rules/#pyflakes-f
- "I", # isort - https://beta.ruff.rs/docs/rules/#isort-i
+ "F", # pyflakes - https://beta.ruff.rs/docs/rules/#pyflakes-f
+ "I", # isort - https://beta.ruff.rs/docs/rules/#isort-i
"ICN", # flake8-import-conventions - https://beta.ruff.rs/docs/rules/#flake8-import-conventions-icn
"ISC", # flake8-implicit-str-concat - https://beta.ruff.rs/docs/rules/#flake8-implicit-str-concat-isc
"PLE", # pylint error - https://beta.ruff.rs/docs/rules/#error-ple
"PLC", # pylint convention - https://beta.ruff.rs/docs/rules/#convention-plc
"PLR", # pylint refactoring - https://beta.ruff.rs/docs/rules/#refactor-plr
"PLW", # pylint warning - https://beta.ruff.rs/docs/rules/#warning-plw
- "PL", # pylint - https://beta.ruff.rs/docs/rules/#pylint-pl
+ "PL", # pylint - https://beta.ruff.rs/docs/rules/#pylint-pl
"PYI", # flake8-pyi - https://beta.ruff.rs/docs/rules/#flake8-pyi-pyi
- "Q", # flake8-quotes - https://beta.ruff.rs/docs/rules/#flake8-quotes-q
+ "Q", # flake8-quotes - https://beta.ruff.rs/docs/rules/#flake8-quotes-q
"PTH", # flake8-use-pathlib - https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth
"T10", # flake8-debugger https://beta.ruff.rs/docs/rules/#flake8-debugger-t10
- "TD", # flake8-todo - https://beta.ruff.rs/docs/rules/#flake8-todos-td
- "W" # pycodestyle warning - https://beta.ruff.rs/docs/rules/#warning-w
+ "TD", # flake8-todo - https://beta.ruff.rs/docs/rules/#flake8-todos-td
+ "W", # pycodestyle warning - https://beta.ruff.rs/docs/rules/#warning-w
]
# Ignore specific rules
ignore = [
- "W291", # https://beta.ruff.rs/docs/rules/trailing-whitespace/
+ "W291", # https://beta.ruff.rs/docs/rules/trailing-whitespace/
"PLR0913", # https://beta.ruff.rs/docs/rules/too-many-arguments/
"PLR2004", #https://beta.ruff.rs/docs/rules/magic-value-comparison/
"PLW0603", #https://beta.ruff.rs/docs/rules/global-statement/
- "B904", # raise-without-from-inside-except - disabled temporarily
+ "B904", # raise-without-from-inside-except - disabled temporarily
"PLC1901", # Compare-to-empty-string - disabled temporarily
"PYI024",
- "FA100" # Enable this rule when drop support to Python 3.7
- ]
+ "FA100", # Enable this rule when drop support to Python 3.7
+]
# Exclude files and directories
-exclude = ["docs", ".eggs", "setup.py", "example", ".aws-sam", ".git", "dist", ".md", ".yaml", "example/samconfig.toml", ".txt", ".ini"]
+exclude = [
+ "docs",
+ ".eggs",
+ "setup.py",
+ "example",
+ ".aws-sam",
+ ".git",
+ "dist",
+ ".md",
+ ".yaml",
+ "example/samconfig.toml",
+ ".txt",
+ ".ini",
+]
# Maximum line length
line-length = 120
@@ -50,7 +63,10 @@ fix = true
fixable = ["I", "COM812", "W"]
# See: https://github.com/astral-sh/ruff/issues/128
-typing-modules = ["aws_lambda_powertools.utilities.parser.types"]
+typing-modules = [
+ "aws_lambda_powertools.utilities.parser.types",
+ "aws_lambda_powertools.shared.types",
+]
[mccabe]
# Maximum cyclomatic complexity
diff --git a/tests/events/secretsManagerEvent.json b/tests/events/secretsManagerEvent.json
new file mode 100644
index 00000000000..f07ea1e0b03
--- /dev/null
+++ b/tests/events/secretsManagerEvent.json
@@ -0,0 +1,5 @@
+{
+ "SecretId":"arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3",
+ "ClientRequestToken":"550e8400-e29b-41d4-a716-446655440000",
+ "Step":"createSecret"
+}
\ No newline at end of file
diff --git a/tests/unit/data_classes/test_kinesis_firehose_response.py b/tests/unit/data_classes/test_kinesis_firehose_response.py
new file mode 100644
index 00000000000..0be8d0d3ec0
--- /dev/null
+++ b/tests/unit/data_classes/test_kinesis_firehose_response.py
@@ -0,0 +1,115 @@
+from aws_lambda_powertools.utilities.data_classes import (
+ KinesisFirehoseDataTransformationRecord,
+ KinesisFirehoseDataTransformationRecordMetadata,
+ KinesisFirehoseDataTransformationResponse,
+ KinesisFirehoseEvent,
+)
+from aws_lambda_powertools.utilities.serialization import base64_encode, base64_from_str
+from tests.functional.utils import load_event
+
+
+def test_kinesis_firehose_response_metadata():
+    # When we create metadata with partition keys and attach it to a Firehose response record
+ metadata_partition = KinesisFirehoseDataTransformationRecordMetadata(partition_keys={"year": "2023"})
+
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id="test_id",
+ metadata=metadata_partition,
+ data="",
+ )
+    # Then we should have the partition keys available in the metadata field with the same value
+ assert processed_record.metadata.partition_keys["year"] == "2023"
+ assert metadata_partition.asdict() == {"partitionKeys": {"year": "2023"}}
+
+
+def test_kinesis_firehose_response():
+ # GIVEN a Kinesis Firehose Event with two records
+ raw_event = load_event("kinesisFirehoseKinesisEvent.json")
+ parsed_event = KinesisFirehoseEvent(data=raw_event)
+
+ # WHEN we create a Data Transformation Response without changing the data
+ response = KinesisFirehoseDataTransformationResponse()
+ for record in parsed_event.records:
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id=record.record_id,
+ data=record.data,
+ )
+ response.add_record(record=processed_record)
+
+ # THEN we should have the same record data
+ record_01, record_02 = response.records[0], response.records[1]
+ raw_record_01, raw_record_02 = raw_event["records"][0], raw_event["records"][1]
+
+ assert len(response.records) == 2
+
+ assert record_01.result == "Ok"
+ assert record_02.result == "Ok"
+
+ assert record_01.record_id == raw_record_01["recordId"]
+ assert record_02.record_id == raw_record_02["recordId"]
+
+ assert record_01.data == raw_record_01["data"]
+ assert record_02.data == raw_record_02["data"]
+
+
+def test_kinesis_firehose_response_asdict():
+ # GIVEN the following example response provided by Firehose
+ sample_response = {
+ "records": [
+ {"recordId": "sample_record", "data": "", "result": "Ok", "metadata": {"partitionKeys": {"year": "2023"}}},
+ ],
+ }
+
+ response = KinesisFirehoseDataTransformationResponse()
+ metadata_partition = KinesisFirehoseDataTransformationRecordMetadata(
+ partition_keys=sample_response["records"][0]["metadata"]["partitionKeys"],
+ )
+
+ # WHEN we create a transformation record with the exact same data
+ processed_record = KinesisFirehoseDataTransformationRecord(
+ record_id=sample_response["records"][0]["recordId"],
+ data=sample_response["records"][0]["data"],
+ result=sample_response["records"][0]["result"],
+ metadata=metadata_partition,
+ )
+
+ # THEN serialized response should return the same value
+ response.add_record(record=processed_record)
+ assert response.asdict() == sample_response
+
+
+def test_kinesis_firehose_create_response():
+ # GIVEN a Kinesis Firehose Event with two records
+ raw_event = load_event("kinesisFirehoseKinesisEvent.json")
+ parsed_event = KinesisFirehoseEvent(data=raw_event)
+
+ # WHEN we create a Data Transformation Response changing the data
+ # WHEN we add partitions keys
+
+ arbitrary_data = "arbitrary data"
+
+ response = KinesisFirehoseDataTransformationResponse()
+ for record in parsed_event.records:
+ metadata_partition = KinesisFirehoseDataTransformationRecordMetadata(partition_keys={"year": "2023"})
+ processed_record = record.build_data_transformation_response(
+ metadata=metadata_partition,
+ data=base64_from_str(arbitrary_data),
+ )
+ response.add_record(record=processed_record)
+
+    # THEN we should have the transformed data and the original record IDs
+ record_01, record_02 = response.records[0], response.records[1]
+ raw_record_01, raw_record_02 = raw_event["records"][0], raw_event["records"][1]
+
+ assert len(response.records) == 2
+
+ assert record_01.result == "Ok"
+ assert record_02.result == "Ok"
+
+ assert record_01.record_id == raw_record_01["recordId"]
+ assert record_02.record_id == raw_record_02["recordId"]
+
+ assert record_01.data == base64_encode(arbitrary_data)
+ assert record_02.data == base64_encode(arbitrary_data)
+
+ assert record_01.metadata.partition_keys["year"] == "2023"
diff --git a/tests/unit/data_classes/test_secrets_manager_event.py b/tests/unit/data_classes/test_secrets_manager_event.py
new file mode 100644
index 00000000000..6bba952aa9b
--- /dev/null
+++ b/tests/unit/data_classes/test_secrets_manager_event.py
@@ -0,0 +1,12 @@
+from aws_lambda_powertools.utilities.data_classes.secrets_manager_event import SecretsManagerEvent
+from tests.functional.utils import load_event
+
+
+def test_secrets_manager_event():
+ raw_event = load_event("secretsManagerEvent.json")
+ parsed_event = SecretsManagerEvent(raw_event)
+
+ assert parsed_event.secret_id == raw_event["SecretId"]
+ assert parsed_event.client_request_token == raw_event["ClientRequestToken"]
+ assert parsed_event.version_id == raw_event["ClientRequestToken"]
+ assert parsed_event.step == raw_event["Step"]