diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index f34d71218b4..3fc4b17ec04 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -28,7 +28,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
index 56e2cdb1ce2..b0e0aa9bc78 100644
--- a/.github/workflows/dependency-review.yml
+++ b/.github/workflows/dependency-review.yml
@@ -17,6 +17,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: 'Dependency Review'
uses: actions/dependency-review-action@6c5ccdad469c9f8a2996bfecaec55a631a347034 # v3.1.0
diff --git a/.github/workflows/label_pr_on_title.yml b/.github/workflows/label_pr_on_title.yml
index 747946bf4f4..f4372931ea1 100644
--- a/.github/workflows/label_pr_on_title.yml
+++ b/.github/workflows/label_pr_on_title.yml
@@ -50,7 +50,7 @@ jobs:
pull-requests: write # label respective PR
steps:
- name: Checkout repository
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: "Label PR based on title"
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1
env:
diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml
index ed21d851d35..ebbb9f69e2f 100644
--- a/.github/workflows/on_label_added.yml
+++ b/.github/workflows/on_label_added.yml
@@ -47,7 +47,7 @@ jobs:
permissions:
pull-requests: write # comment on PR
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
# Maintenance: Persist state per PR as an artifact to avoid spam on label add
- name: "Suggest split large Pull Request"
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1
diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml
index 0c67cea3384..26dd705cbc0 100644
--- a/.github/workflows/on_merged_pr.yml
+++ b/.github/workflows/on_merged_pr.yml
@@ -49,7 +49,7 @@ jobs:
issues: write # label issue with pending-release
if: needs.get_pr_details.outputs.prIsMerged == 'true'
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: "Label PR related issue for release"
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1
env:
diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml
index f83aa8133d3..771dc03b7fa 100644
--- a/.github/workflows/on_opened_pr.yml
+++ b/.github/workflows/on_opened_pr.yml
@@ -47,7 +47,7 @@ jobs:
needs: get_pr_details
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: "Ensure related issue is present"
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1
env:
@@ -66,7 +66,7 @@ jobs:
permissions:
pull-requests: write # label and comment on PR if missing acknowledge section (requirement)
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: "Ensure acknowledgement section is present"
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1
env:
diff --git a/.github/workflows/ossf_scorecard.yml b/.github/workflows/ossf_scorecard.yml
index d5746559804..d602d1846bc 100644
--- a/.github/workflows/ossf_scorecard.yml
+++ b/.github/workflows/ossf_scorecard.yml
@@ -22,12 +22,12 @@ jobs:
steps:
- name: "Checkout code"
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
persist-credentials: false
- name: "Run analysis"
- uses: ossf/scorecard-action@08b4669551908b1024bb425080c797723083c031 # v2.2.0
+ uses: ossf/scorecard-action@483ef80eb98fb506c348f7d62e28055e49fe2398 # v2.3.0
with:
results_file: results.sarif
results_format: sarif
diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml
index 507ca9db843..b7b89e6d60f 100644
--- a/.github/workflows/publish_v2_layer.yml
+++ b/.github/workflows/publish_v2_layer.yml
@@ -88,7 +88,7 @@ jobs:
working-directory: ./layer
steps:
- name: checkout
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -105,7 +105,7 @@ jobs:
with:
node-version: "16.12"
- name: Setup python
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: "3.11"
cache: "pip"
@@ -247,7 +247,7 @@ jobs:
pages: none
steps:
- name: Checkout repository # reusable workflows start clean, so we need to checkout again
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
diff --git a/.github/workflows/quality_check.yml b/.github/workflows/quality_check.yml
index 575cfa39d9a..f1dcab5730f 100644
--- a/.github/workflows/quality_check.yml
+++ b/.github/workflows/quality_check.yml
@@ -50,11 +50,11 @@ jobs:
permissions:
contents: read # checkout code only
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: Install poetry
run: pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
diff --git a/.github/workflows/quality_check_pydanticv2.yml b/.github/workflows/quality_check_pydanticv2.yml
index a4362ec7519..fd8e00fc990 100644
--- a/.github/workflows/quality_check_pydanticv2.yml
+++ b/.github/workflows/quality_check_pydanticv2.yml
@@ -50,11 +50,11 @@ jobs:
permissions:
contents: read # checkout code only
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: Install poetry
run: pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml
index 73e2fc36479..8b811ff9ec0 100644
--- a/.github/workflows/record_pr.yml
+++ b/.github/workflows/record_pr.yml
@@ -46,7 +46,7 @@ jobs:
permissions:
contents: read # NOTE: treat as untrusted location
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: "Extract PR details"
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1
with:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 2fcf63cb2fb..62f689382db 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -80,7 +80,7 @@ jobs:
RELEASE_VERSION="${RELEASE_TAG_VERSION:1}"
echo "RELEASE_VERSION=${RELEASE_VERSION}" >> "$GITHUB_OUTPUT"
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -115,7 +115,7 @@ jobs:
contents: read
steps:
# NOTE: we need actions/checkout to configure git first (pre-commit hooks in make dev)
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -131,7 +131,7 @@ jobs:
- name: Install poetry
run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0
- name: Set up Python
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: "3.11"
cache: "poetry"
@@ -156,7 +156,7 @@ jobs:
attestation_hashes: ${{ steps.encoded_hash.outputs.attestation_hashes }}
steps:
# NOTE: we need actions/checkout to configure git first (pre-commit hooks in make dev)
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -169,7 +169,7 @@ jobs:
- name: Install poetry
run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0
- name: Set up Python
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: "3.11"
cache: "poetry"
@@ -225,7 +225,7 @@ jobs:
RELEASE_VERSION: ${{ needs.seal.outputs.RELEASE_VERSION }}
steps:
# NOTE: we need actions/checkout in order to use our local actions (e.g., ./.github/actions)
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -259,7 +259,7 @@ jobs:
contents: write
steps:
# NOTE: we need actions/checkout to authenticate and configure git first
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -303,7 +303,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# NOTE: we need actions/checkout to authenticate and configure git first
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -357,7 +357,7 @@ jobs:
env:
RELEASE_VERSION: ${{ needs.seal.outputs.RELEASE_VERSION }}
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml
index cf544c87e9a..4e817004f12 100644
--- a/.github/workflows/reusable_deploy_v2_layer_stack.yml
+++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml
@@ -93,7 +93,7 @@ jobs:
- region: "ap-south-1"
has_arm64_support: "true"
- region: "ap-south-2"
- has_arm64_support: "false"
+ has_arm64_support: "true"
- region: "ap-southeast-1"
has_arm64_support: "true"
- region: "ap-southeast-2"
@@ -101,19 +101,19 @@ jobs:
- region: "ap-southeast-3"
has_arm64_support: "true"
- region: "ap-southeast-4"
- has_arm64_support: "false"
+ has_arm64_support: "true"
- region: "ca-central-1"
has_arm64_support: "true"
- region: "eu-central-1"
has_arm64_support: "true"
- region: "eu-central-2"
- has_arm64_support: "false"
+ has_arm64_support: "true"
- region: "eu-north-1"
has_arm64_support: "true"
- region: "eu-south-1"
has_arm64_support: "true"
- region: "eu-south-2"
- has_arm64_support: "false"
+ has_arm64_support: "true"
- region: "eu-west-1"
has_arm64_support: "true"
- region: "eu-west-2"
@@ -121,9 +121,9 @@ jobs:
- region: "eu-west-3"
has_arm64_support: "true"
- region: "il-central-1"
- has_arm64_support: "false"
+ has_arm64_support: "true"
- region: "me-central-1"
- has_arm64_support: "false"
+ has_arm64_support: "true"
- region: "me-south-1"
has_arm64_support: "true"
- region: "sa-east-1"
@@ -138,7 +138,7 @@ jobs:
has_arm64_support: "true"
steps:
- name: checkout
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -160,7 +160,7 @@ jobs:
with:
node-version: "16.12"
- name: Setup python
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: "3.11"
cache: "pip"
diff --git a/.github/workflows/reusable_deploy_v2_sar.yml b/.github/workflows/reusable_deploy_v2_sar.yml
index 0b79b527947..e8f1dfd2a0d 100644
--- a/.github/workflows/reusable_deploy_v2_sar.yml
+++ b/.github/workflows/reusable_deploy_v2_sar.yml
@@ -79,7 +79,7 @@ jobs:
architecture: ["x86_64", "arm64"]
steps:
- name: checkout
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
ref: ${{ env.RELEASE_COMMIT }}
diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml
index ad29d4c9bf1..53ca43a884b 100644
--- a/.github/workflows/reusable_export_pr_details.yml
+++ b/.github/workflows/reusable_export_pr_details.yml
@@ -76,7 +76,7 @@ jobs:
prLabels: ${{ steps.prLabels.outputs.prLabels }}
steps:
- name: Checkout repository # in case caller workflow doesn't checkout thus failing with file not found
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: "Download previously saved PR"
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1
env:
diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml
index 7f74b55bf44..b8e2308ff8a 100644
--- a/.github/workflows/reusable_publish_changelog.yml
+++ b/.github/workflows/reusable_publish_changelog.yml
@@ -26,7 +26,7 @@ jobs:
pull-requests: write # create PR
steps:
- name: Checkout repository # reusable workflows start clean, so we need to checkout again
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
fetch-depth: 0
- name: "Generate latest changelog"
diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml
index e84885d0372..43656b7e17a 100644
--- a/.github/workflows/reusable_publish_docs.yml
+++ b/.github/workflows/reusable_publish_docs.yml
@@ -44,14 +44,14 @@ jobs:
id-token: write # trade JWT token for AWS credentials in AWS Docs account
pages: write # uncomment if mike fails as we migrated to S3 hosting
steps:
- - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
fetch-depth: 0
ref: ${{ inputs.git_ref }}
- name: Install poetry
run: pipx install poetry
- name: Set up Python
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: "3.11"
cache: "poetry"
diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml
index 15a62febdaf..8306995a8ed 100644
--- a/.github/workflows/run-e2e-tests.yml
+++ b/.github/workflows/run-e2e-tests.yml
@@ -51,11 +51,11 @@ jobs:
if: ${{ github.actor != 'dependabot[bot]' && github.repository == 'aws-powertools/powertools-lambda-python' }}
steps:
- name: "Checkout"
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: Install poetry
run: pipx install poetry
- name: "Use Python"
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
+ uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
with:
python-version: ${{ matrix.version }}
architecture: "x64"
diff --git a/.github/workflows/secure_workflows.yml b/.github/workflows/secure_workflows.yml
index 23540ef83ff..279345cfc7b 100644
--- a/.github/workflows/secure_workflows.yml
+++ b/.github/workflows/secure_workflows.yml
@@ -30,7 +30,7 @@ jobs:
contents: read # checkout code and subsequently GitHub action workflows
steps:
- name: Checkout code
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
+ uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
- name: Ensure 3rd party workflows have SHA pinned
uses: zgosalvez/github-actions-ensure-sha-pinned-actions@f32435541e24cd6a4700a7f52bb2ec59e80603b1 # v2.1.4
with:
diff --git a/.gitignore b/.gitignore
index a69b4eaf618..2a814459161 100644
--- a/.gitignore
+++ b/.gitignore
@@ -252,6 +252,7 @@ dmypy.json
.pyre/
### VisualStudioCode ###
+.vscode
.vscode/*
!.vscode/tasks.json
!.vscode/launch.json
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6bd7dbaf7f6..0dffae83eb4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,22 +4,79 @@
# Unreleased
-## Bug Fixes
+## Documentation
+* **contributing:** initial structure for revamped contributing guide ([#3133](https://github.com/aws-powertools/powertools-lambda-python/issues/3133))
+* **event_handler:** add information about case-insensitive header lookup function ([#3183](https://github.com/aws-powertools/powertools-lambda-python/issues/3183))
+
+## Features
+
+* **data_masking:** add new sensitive data masking utility ([#2197](https://github.com/aws-powertools/powertools-lambda-python/issues/2197))
+* **event_handler:** add support to VPC Lattice payload v2 ([#3153](https://github.com/aws-powertools/powertools-lambda-python/issues/3153))
+* **layers:** add arm64 support in more regions ([#3151](https://github.com/aws-powertools/powertools-lambda-python/issues/3151))
+* **logger:** new stack_trace field with rich exception details ([#3147](https://github.com/aws-powertools/powertools-lambda-python/issues/3147))
+* **parser:** infer model from type hint ([#3181](https://github.com/aws-powertools/powertools-lambda-python/issues/3181))
+
+## Maintenance
+
+* **deps:** bump gitpython from 3.1.35 to 3.1.37 in /docs ([#3188](https://github.com/aws-powertools/powertools-lambda-python/issues/3188))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3174](https://github.com/aws-powertools/powertools-lambda-python/issues/3174))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#3198](https://github.com/aws-powertools/powertools-lambda-python/issues/3198))
+* **deps:** bump squidfunk/mkdocs-material from `b41ba6d` to `06673a1` in /docs ([#3124](https://github.com/aws-powertools/powertools-lambda-python/issues/3124))
+* **deps:** bump aws-xray-sdk from 2.12.0 to 2.12.1 ([#3197](https://github.com/aws-powertools/powertools-lambda-python/issues/3197))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3127](https://github.com/aws-powertools/powertools-lambda-python/issues/3127))
+* **deps:** bump actions/checkout from 4.0.0 to 4.1.0 ([#3128](https://github.com/aws-powertools/powertools-lambda-python/issues/3128))
+* **deps:** bump urllib3 from 1.26.16 to 1.26.17 ([#3162](https://github.com/aws-powertools/powertools-lambda-python/issues/3162))
+* **deps:** bump squidfunk/mkdocs-material from `06673a1` to `e5f28aa` in /docs ([#3134](https://github.com/aws-powertools/powertools-lambda-python/issues/3134))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#3177](https://github.com/aws-powertools/powertools-lambda-python/issues/3177))
+* **deps:** bump actions/setup-python from 4.7.0 to 4.7.1 ([#3158](https://github.com/aws-powertools/powertools-lambda-python/issues/3158))
+* **deps:** bump squidfunk/mkdocs-material from `a4cfa88` to `cb38dc2` in /docs ([#3189](https://github.com/aws-powertools/powertools-lambda-python/issues/3189))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3156](https://github.com/aws-powertools/powertools-lambda-python/issues/3156))
+* **deps:** bump squidfunk/mkdocs-material from `e5f28aa` to `cbfecae` in /docs ([#3157](https://github.com/aws-powertools/powertools-lambda-python/issues/3157))
+* **deps:** bump pydantic from 1.10.12 to 1.10.13 ([#3144](https://github.com/aws-powertools/powertools-lambda-python/issues/3144))
+* **deps:** bump fastjsonschema from 2.18.0 to 2.18.1 ([#3159](https://github.com/aws-powertools/powertools-lambda-python/issues/3159))
+* **deps:** bump squidfunk/mkdocs-material from `cbfecae` to `a4cfa88` in /docs ([#3175](https://github.com/aws-powertools/powertools-lambda-python/issues/3175))
+* **deps:** bump ossf/scorecard-action from 2.2.0 to 2.3.0 ([#3178](https://github.com/aws-powertools/powertools-lambda-python/issues/3178))
+* **deps-dev:** bump aws-cdk from 2.98.0 to 2.99.0 ([#3148](https://github.com/aws-powertools/powertools-lambda-python/issues/3148))
+* **deps-dev:** bump aws-cdk from 2.99.1 to 2.100.0 ([#3185](https://github.com/aws-powertools/powertools-lambda-python/issues/3185))
+* **deps-dev:** bump the boto-typing group with 2 updates ([#3143](https://github.com/aws-powertools/powertools-lambda-python/issues/3143))
+* **deps-dev:** bump aws-cdk from 2.99.0 to 2.99.1 ([#3155](https://github.com/aws-powertools/powertools-lambda-python/issues/3155))
+* **deps-dev:** bump aws-cdk from 2.97.0 to 2.98.0 ([#3139](https://github.com/aws-powertools/powertools-lambda-python/issues/3139))
+* **deps-dev:** bump aws-cdk from 2.96.2 to 2.97.0 ([#3129](https://github.com/aws-powertools/powertools-lambda-python/issues/3129))
+* **deps-dev:** bump types-requests from 2.31.0.3 to 2.31.0.5 ([#3136](https://github.com/aws-powertools/powertools-lambda-python/issues/3136))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3135](https://github.com/aws-powertools/powertools-lambda-python/issues/3135))
+* **deps-dev:** bump ruff from 0.0.291 to 0.0.292 ([#3161](https://github.com/aws-powertools/powertools-lambda-python/issues/3161))
+* **deps-dev:** bump ruff from 0.0.290 to 0.0.291 ([#3126](https://github.com/aws-powertools/powertools-lambda-python/issues/3126))
+* **deps-dev:** bump urllib3 from 1.26.16 to 1.26.17 in /layer ([#3163](https://github.com/aws-powertools/powertools-lambda-python/issues/3163))
+* **deps-dev:** bump sentry-sdk from 1.31.0 to 1.32.0 ([#3192](https://github.com/aws-powertools/powertools-lambda-python/issues/3192))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3196](https://github.com/aws-powertools/powertools-lambda-python/issues/3196))
+* **deps-dev:** bump cfn-lint from 0.80.3 to 0.80.4 ([#3166](https://github.com/aws-powertools/powertools-lambda-python/issues/3166))
+* **deps-dev:** bump cfn-lint from 0.80.2 to 0.80.3 ([#3125](https://github.com/aws-powertools/powertools-lambda-python/issues/3125))
+* **deps-dev:** bump cfn-lint from 0.80.4 to 0.81.0 ([#3179](https://github.com/aws-powertools/powertools-lambda-python/issues/3179))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3170](https://github.com/aws-powertools/powertools-lambda-python/issues/3170))
+* **deps-dev:** bump types-requests from 2.31.0.5 to 2.31.0.6 ([#3145](https://github.com/aws-powertools/powertools-lambda-python/issues/3145))
+
+
+
+## [v2.25.1] - 2023-09-22
+## Bug Fixes
+
+* **logger:** add explicit None return type annotations ([#3113](https://github.com/aws-powertools/powertools-lambda-python/issues/3113))
* **metrics:** support additional arguments in functions wrapped with log_metrics decorator ([#3120](https://github.com/aws-powertools/powertools-lambda-python/issues/3120))
## Maintenance
-* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3115](https://github.com/aws-powertools/powertools-lambda-python/issues/3115))
+* version bump
* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3108](https://github.com/aws-powertools/powertools-lambda-python/issues/3108))
-* **deps:** bump squidfunk/mkdocs-material from `c4890ab` to `4ff781e` in /docs ([#3110](https://github.com/aws-powertools/powertools-lambda-python/issues/3110))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3115](https://github.com/aws-powertools/powertools-lambda-python/issues/3115))
* **deps:** bump squidfunk/mkdocs-material from `4ff781e` to `b41ba6d` in /docs ([#3117](https://github.com/aws-powertools/powertools-lambda-python/issues/3117))
+* **deps:** bump squidfunk/mkdocs-material from `c4890ab` to `4ff781e` in /docs ([#3110](https://github.com/aws-powertools/powertools-lambda-python/issues/3110))
* **deps-dev:** bump ruff from 0.0.289 to 0.0.290 ([#3105](https://github.com/aws-powertools/powertools-lambda-python/issues/3105))
* **deps-dev:** bump aws-cdk from 2.96.1 to 2.96.2 ([#3102](https://github.com/aws-powertools/powertools-lambda-python/issues/3102))
-* **deps-dev:** bump types-requests from 2.31.0.2 to 2.31.0.3 ([#3114](https://github.com/aws-powertools/powertools-lambda-python/issues/3114))
-* **deps-dev:** bump the boto-typing group with 1 update ([#3101](https://github.com/aws-powertools/powertools-lambda-python/issues/3101))
* **deps-dev:** bump the boto-typing group with 3 updates ([#3118](https://github.com/aws-powertools/powertools-lambda-python/issues/3118))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3101](https://github.com/aws-powertools/powertools-lambda-python/issues/3101))
* **deps-dev:** bump cfn-lint from 0.79.11 to 0.80.2 ([#3107](https://github.com/aws-powertools/powertools-lambda-python/issues/3107))
+* **deps-dev:** bump types-requests from 2.31.0.2 to 2.31.0.3 ([#3114](https://github.com/aws-powertools/powertools-lambda-python/issues/3114))
@@ -3826,7 +3883,8 @@
* Merge pull request [#5](https://github.com/aws-powertools/powertools-lambda-python/issues/5) from jfuss/feat/python38
-[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.25.0...HEAD
+[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.25.1...HEAD
+[v2.25.1]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.25.0...v2.25.1
[v2.25.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.24.0...v2.25.0
[v2.24.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.23.1...v2.24.0
[v2.23.1]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.23.0...v2.23.1
diff --git a/Makefile b/Makefile
index 5bfbf031949..d52d0b46cbb 100644
--- a/Makefile
+++ b/Makefile
@@ -6,14 +6,15 @@ target:
dev:
pip install --upgrade pip pre-commit poetry
+ poetry config --local virtualenvs.in-project true
@$(MAKE) dev-version-plugin
- poetry install --extras "all"
+ poetry install --extras "all datamasking-aws-sdk"
pre-commit install
dev-gitpod:
pip install --upgrade pip poetry
@$(MAKE) dev-version-plugin
- poetry install --extras "all"
+ poetry install --extras "all datamasking-aws-sdk"
pre-commit install
format:
diff --git a/aws_lambda_powertools/event_handler/__init__.py b/aws_lambda_powertools/event_handler/__init__.py
index 85298cfc15c..7bdd9a97f72 100644
--- a/aws_lambda_powertools/event_handler/__init__.py
+++ b/aws_lambda_powertools/event_handler/__init__.py
@@ -14,7 +14,7 @@
from aws_lambda_powertools.event_handler.lambda_function_url import (
LambdaFunctionUrlResolver,
)
-from aws_lambda_powertools.event_handler.vpc_lattice import VPCLatticeResolver
+from aws_lambda_powertools.event_handler.vpc_lattice import VPCLatticeResolver, VPCLatticeV2Resolver
__all__ = [
"AppSyncResolver",
@@ -26,4 +26,5 @@
"LambdaFunctionUrlResolver",
"Response",
"VPCLatticeResolver",
+ "VPCLatticeV2Resolver",
]
diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py
index 2163d7d762e..46cb5587135 100644
--- a/aws_lambda_powertools/event_handler/api_gateway.py
+++ b/aws_lambda_powertools/event_handler/api_gateway.py
@@ -22,6 +22,7 @@
APIGatewayProxyEventV2,
LambdaFunctionUrlEvent,
VPCLatticeEvent,
+ VPCLatticeEventV2,
)
from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent
from aws_lambda_powertools.utilities.typing import LambdaContext
@@ -43,6 +44,7 @@ class ProxyEventType(Enum):
APIGatewayProxyEventV2 = "APIGatewayProxyEventV2"
ALBEvent = "ALBEvent"
VPCLatticeEvent = "VPCLatticeEvent"
+ VPCLatticeEventV2 = "VPCLatticeEventV2"
LambdaFunctionUrlEvent = "LambdaFunctionUrlEvent"
@@ -999,6 +1001,9 @@ def _to_proxy_event(self, event: Dict) -> BaseProxyEvent:
if self._proxy_type == ProxyEventType.VPCLatticeEvent:
logger.debug("Converting event to VPC Lattice contract")
return VPCLatticeEvent(event)
+ if self._proxy_type == ProxyEventType.VPCLatticeEventV2:
+ logger.debug("Converting event to VPC LatticeV2 contract")
+ return VPCLatticeEventV2(event)
logger.debug("Converting event to ALB contract")
return ALBEvent(event)
diff --git a/aws_lambda_powertools/event_handler/vpc_lattice.py b/aws_lambda_powertools/event_handler/vpc_lattice.py
index b3cb042b40b..bcee046e382 100644
--- a/aws_lambda_powertools/event_handler/vpc_lattice.py
+++ b/aws_lambda_powertools/event_handler/vpc_lattice.py
@@ -5,7 +5,7 @@
ApiGatewayResolver,
ProxyEventType,
)
-from aws_lambda_powertools.utilities.data_classes import VPCLatticeEvent
+from aws_lambda_powertools.utilities.data_classes import VPCLatticeEvent, VPCLatticeEventV2
class VPCLatticeResolver(ApiGatewayResolver):
@@ -51,3 +51,48 @@ def __init__(
):
"""Amazon VPC Lattice resolver"""
super().__init__(ProxyEventType.VPCLatticeEvent, cors, debug, serializer, strip_prefixes)
+
+
+class VPCLatticeV2Resolver(ApiGatewayResolver):
+ """VPC Lattice resolver
+
+ Documentation:
+ - https://docs.aws.amazon.com/lambda/latest/dg/services-vpc-lattice.html
+ - https://docs.aws.amazon.com/lambda/latest/dg/services-vpc-lattice.html#vpc-lattice-receiving-events
+
+ Examples
+ --------
+ Simple example integrating with Tracer
+
+ ```python
+ from aws_lambda_powertools import Tracer
+ from aws_lambda_powertools.event_handler import VPCLatticeV2Resolver
+
+ tracer = Tracer()
+ app = VPCLatticeV2Resolver()
+
+ @app.get("/get-call")
+ def simple_get():
+ return {"message": "Foo"}
+
+ @app.post("/post-call")
+ def simple_post():
+ post_data: dict = app.current_event.json_body
+ return {"message": post_data}
+
+ @tracer.capture_lambda_handler
+ def lambda_handler(event, context):
+ return app.resolve(event, context)
+ """
+
+ current_event: VPCLatticeEventV2
+
+ def __init__(
+ self,
+ cors: Optional[CORSConfig] = None,
+ debug: Optional[bool] = None,
+ serializer: Optional[Callable[[Dict], str]] = None,
+ strip_prefixes: Optional[List[Union[str, Pattern]]] = None,
+ ):
+ """Amazon VPC Lattice resolver"""
+ super().__init__(ProxyEventType.VPCLatticeEventV2, cors, debug, serializer, strip_prefixes)
diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py
index cf0a7598051..22419b160d1 100644
--- a/aws_lambda_powertools/logging/formatter.py
+++ b/aws_lambda_powertools/logging/formatter.py
@@ -5,12 +5,13 @@
import logging
import os
import time
+import traceback
from abc import ABCMeta, abstractmethod
from datetime import datetime, timezone
from functools import partial
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
-from aws_lambda_powertools.logging.types import LogRecord
+from aws_lambda_powertools.logging.types import LogRecord, LogStackTrace
from aws_lambda_powertools.shared import constants
from aws_lambda_powertools.shared.functions import powertools_dev_is_set
@@ -77,6 +78,7 @@ def __init__(
log_record_order: List[str] | None = None,
utc: bool = False,
use_rfc3339: bool = False,
+ serialize_stacktrace: bool = True,
**kwargs,
) -> None:
"""Return a LambdaPowertoolsFormatter instance.
@@ -144,10 +146,14 @@ def __init__(
if self.utc:
self.converter = time.gmtime
+ else:
+ self.converter = time.localtime
self.keys_combined = {**self._build_default_keys(), **kwargs}
self.log_format.update(**self.keys_combined)
+ self.serialize_stacktrace = serialize_stacktrace
+
super().__init__(datefmt=self.datefmt)
def serialize(self, log: LogRecord) -> str:
@@ -158,11 +164,15 @@ def format(self, record: logging.LogRecord) -> str: # noqa: A003
"""Format logging record as structured JSON str"""
formatted_log = self._extract_log_keys(log_record=record)
formatted_log["message"] = self._extract_log_message(log_record=record)
+
# exception and exception_name fields can be added as extra key
# in any log level, we try to extract and use them first
extracted_exception, extracted_exception_name = self._extract_log_exception(log_record=record)
formatted_log["exception"] = formatted_log.get("exception", extracted_exception)
formatted_log["exception_name"] = formatted_log.get("exception_name", extracted_exception_name)
+ if self.serialize_stacktrace:
+ # Generate the traceback from the traceback library
+ formatted_log["stack_trace"] = self._serialize_stacktrace(log_record=record)
formatted_log["xray_trace_id"] = self._get_latest_trace_id()
formatted_log = self._strip_none_records(records=formatted_log)
@@ -273,6 +283,24 @@ def _extract_log_message(self, log_record: logging.LogRecord) -> Union[Dict[str,
return message
+ def _serialize_stacktrace(self, log_record: logging.LogRecord) -> LogStackTrace | None:
+ if log_record.exc_info:
+ exception_info: LogStackTrace = {
+ "type": log_record.exc_info[0].__name__, # type: ignore
+ "value": log_record.exc_info[1], # type: ignore
+ "module": log_record.exc_info[1].__class__.__module__,
+ "frames": [],
+ }
+
+ exception_info["frames"] = [
+ {"file": fs.filename, "line": fs.lineno, "function": fs.name, "statement": fs.line}
+ for fs in traceback.extract_tb(log_record.exc_info[2])
+ ]
+
+ return exception_info
+
+ return None
+
def _extract_log_exception(self, log_record: logging.LogRecord) -> Union[Tuple[str, str], Tuple[None, None]]:
"""Format traceback information, if available
diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py
index 45f8bc80721..28d899d25e5 100644
--- a/aws_lambda_powertools/logging/logger.py
+++ b/aws_lambda_powertools/logging/logger.py
@@ -220,6 +220,7 @@ def __init__(
log_record_order: Optional[List[str]] = None,
utc: bool = False,
use_rfc3339: bool = False,
+ serialize_stacktrace: bool = True,
**kwargs,
) -> None:
self.service = resolve_env_var_choice(
@@ -253,6 +254,7 @@ def __init__(
"log_record_order": log_record_order,
"utc": utc,
"use_rfc3339": use_rfc3339,
+ "serialize_stacktrace": serialize_stacktrace,
}
self._init_logger(formatter_options=formatter_options, log_level=level, **kwargs)
diff --git a/aws_lambda_powertools/logging/types.py b/aws_lambda_powertools/logging/types.py
index d166b2e023c..eb2b39afe69 100644
--- a/aws_lambda_powertools/logging/types.py
+++ b/aws_lambda_powertools/logging/types.py
@@ -5,6 +5,7 @@
from aws_lambda_powertools.shared.types import NotRequired, TypeAlias, TypedDict
LogRecord: TypeAlias = Union[Dict[str, Any], "PowertoolsLogRecord"]
+LogStackTrace: TypeAlias = Union[Dict[str, Any], "PowertoolsStackTrace"]
class PowertoolsLogRecord(TypedDict):
@@ -33,3 +34,11 @@ class PowertoolsLogRecord(TypedDict):
# Fields from logger.exception
exception_name: NotRequired[str]
exception: NotRequired[str]
+ stack_trace: NotRequired[Dict[str, Any]]
+
+
+class PowertoolsStackTrace(TypedDict):
+ type: str
+ value: str
+ module: str
+ frames: List[Dict[str, Any]]
diff --git a/aws_lambda_powertools/shared/version.py b/aws_lambda_powertools/shared/version.py
index d4d652c19b7..8326dad5cfe 100644
--- a/aws_lambda_powertools/shared/version.py
+++ b/aws_lambda_powertools/shared/version.py
@@ -1,3 +1,3 @@
"""Exposes version constant to avoid circular dependencies."""
-VERSION = "2.25.0"
+VERSION = "2.26.0"
diff --git a/aws_lambda_powertools/utilities/_data_masking/__init__.py b/aws_lambda_powertools/utilities/_data_masking/__init__.py
new file mode 100644
index 00000000000..806c856ba75
--- /dev/null
+++ b/aws_lambda_powertools/utilities/_data_masking/__init__.py
@@ -0,0 +1,11 @@
+"""
+ Note: This utility is currently in a Non-General Availability (Non-GA) phase and may have limitations.
+ Please DON'T USE THIS utility in production environments.
+ Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced.
+"""
+
+from aws_lambda_powertools.utilities._data_masking.base import DataMasking
+
+__all__ = [
+ "DataMasking",
+]
diff --git a/aws_lambda_powertools/utilities/_data_masking/base.py b/aws_lambda_powertools/utilities/_data_masking/base.py
new file mode 100644
index 00000000000..211e44c3759
--- /dev/null
+++ b/aws_lambda_powertools/utilities/_data_masking/base.py
@@ -0,0 +1,174 @@
+import json
+from typing import Optional, Union
+
+from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider
+
+
+class DataMasking:
+ """
+ Note: This utility is currently in a Non-General Availability (Non-GA) phase and may have limitations.
+ Please DON'T USE THIS utility in production environments.
+ Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced.
+
+ A utility class for masking sensitive data within various data types.
+
+ This class provides methods for masking sensitive information, such as personal
+ identifiers or confidential data, within different data types such as strings,
+ dictionaries, lists, and more. It helps protect sensitive information while
+ preserving the structure of the original data.
+
+ Usage:
+ Instantiate an object of this class and use its methods to mask sensitive data
+ based on the data type. Supported data types include strings, dictionaries,
+ and more.
+
+ Example:
+ ```
+ from aws_lambda_powertools.utilities.data_masking.base import DataMasking
+
+ def lambda_handler(event, context):
+ masker = DataMasking()
+
+ data = {
+ "project": "powertools",
+ "sensitive": "xxxxxxxxxx"
+ }
+
+ masked = masker.mask(data,fields=["sensitive"])
+
+ return masked
+
+ ```
+ """
+
+ def __init__(self, provider: Optional[BaseProvider] = None):
+ self.provider = provider or BaseProvider()
+
+ def encrypt(self, data, fields=None, **provider_options):
+ return self._apply_action(data, fields, self.provider.encrypt, **provider_options)
+
+ def decrypt(self, data, fields=None, **provider_options):
+ return self._apply_action(data, fields, self.provider.decrypt, **provider_options)
+
+ def mask(self, data, fields=None, **provider_options):
+ return self._apply_action(data, fields, self.provider.mask, **provider_options)
+
+ def _apply_action(self, data, fields, action, **provider_options):
+ """
+ Helper method to determine whether to apply a given action to the entire input data
+ or to specific fields if the 'fields' argument is specified.
+
+ Parameters
+ ----------
+ data : any
+ The input data to process.
+ fields : Optional[List[any]] = None
+ A list of fields to apply the action to. If 'None', the action is applied to the entire 'data'.
+ action : Callable
+ The action to apply to the data. It should be a callable that performs an operation on the data
+ and returns the modified value.
+
+ Returns
+ -------
+ any
+ The modified data after applying the action.
+ """
+
+ if fields is not None:
+ return self._apply_action_to_fields(data, fields, action, **provider_options)
+ else:
+ return action(data, **provider_options)
+
+ def _apply_action_to_fields(
+ self,
+ data: Union[dict, str],
+ fields: list,
+ action,
+ **provider_options,
+ ) -> Union[dict, str]:
+ """
+ This method takes the input data, which can be either a dictionary or a JSON string,
+ and applies a mask, an encryption, or a decryption to the specified fields.
+
+ Parameters
+ ----------
+ data : Union[dict, str]
+ The input data to process. It can be either a dictionary or a JSON string.
+ fields : List
+ A list of fields to apply the action to. Each field can be specified as a string or
+ a list of strings representing nested keys in the dictionary.
+ action : Callable
+ The action to apply to the fields. It should be a callable that takes the current
+ value of the field as the first argument and any additional arguments that might be required
+ for the action. It performs an operation on the current value using the provided arguments and
+ returns the modified value.
+ **provider_options:
+ Additional keyword arguments to pass to the 'action' function.
+
+ Returns
+ -------
+ dict
+ The modified dictionary after applying the action to the
+ specified fields.
+
+ Raises
+ -------
+ ValueError
+ If 'fields' parameter is None.
+ TypeError
+ If the 'data' parameter is not a traversable type
+
+ Example
+ -------
+ ```python
+ >>> data = {'a': {'b': {'c': 1}}, 'x': {'y': 2}}
+ >>> fields = ['a.b.c', 'x.y']
+ # The function will transform the value at 'a.b.c' (1) and 'x.y' (2)
+ # and store the result as:
+ new_dict = {'a': {'b': {'c': 'transformed_value'}}, 'x': {'y': 'transformed_value'}}
+ ```
+ """
+
+ if fields is None:
+ raise ValueError("No fields specified.")
+
+ if isinstance(data, str):
+ # Parse JSON string as dictionary
+ my_dict_parsed = json.loads(data)
+ elif isinstance(data, dict):
+ # In case their data has keys that are not strings (i.e. ints), convert it all into a JSON string
+ my_dict_parsed = json.dumps(data)
+ # Turn back into dict so can parse it
+ my_dict_parsed = json.loads(my_dict_parsed)
+ else:
+ raise TypeError(
+ f"Unsupported data type for 'data' parameter. Expected a traversable type, but got {type(data)}.",
+ )
+
+ # For example: ['a.b.c'] in ['a.b.c', 'a.x.y']
+ for nested_key in fields:
+ # Prevent overriding loop variable
+ curr_nested_key = nested_key
+
+ # If the nested_key is not a string, convert it to a string representation
+ if not isinstance(curr_nested_key, str):
+ curr_nested_key = json.dumps(curr_nested_key)
+
+ # Split the nested key string into a list of nested keys
+ # ['a.b.c'] -> ['a', 'b', 'c']
+ keys = curr_nested_key.split(".")
+
+ # Initialize a current dictionary to the root dictionary
+ curr_dict = my_dict_parsed
+
+ # Traverse the dictionary hierarchy by iterating through the list of nested keys
+ for key in keys[:-1]:
+ curr_dict = curr_dict[key]
+
+ # Retrieve the final value of the nested field
+ valtochange = curr_dict[(keys[-1])]
+
+ # Apply the specified 'action' to the target value
+ curr_dict[keys[-1]] = action(valtochange, **provider_options)
+
+ return my_dict_parsed
diff --git a/aws_lambda_powertools/utilities/_data_masking/constants.py b/aws_lambda_powertools/utilities/_data_masking/constants.py
new file mode 100644
index 00000000000..47e74f472cf
--- /dev/null
+++ b/aws_lambda_powertools/utilities/_data_masking/constants.py
@@ -0,0 +1,5 @@
+DATA_MASKING_STRING: str = "*****"
+CACHE_CAPACITY: int = 100
+MAX_CACHE_AGE_SECONDS: float = 300.0
+MAX_MESSAGES_ENCRYPTED: int = 200
+# NOTE: You can also set max messages/bytes per data key
diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py b/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py
new file mode 100644
index 00000000000..7ee07f964b1
--- /dev/null
+++ b/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py
@@ -0,0 +1,5 @@
+from aws_lambda_powertools.utilities._data_masking.provider.base import BaseProvider
+
+__all__ = [
+ "BaseProvider",
+]
diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/base.py b/aws_lambda_powertools/utilities/_data_masking/provider/base.py
new file mode 100644
index 00000000000..a293c6aff9a
--- /dev/null
+++ b/aws_lambda_powertools/utilities/_data_masking/provider/base.py
@@ -0,0 +1,34 @@
+import json
+from typing import Any
+
+from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING
+
+
+class BaseProvider:
+ """
+ Base class for data-masking providers. Subclasses must override encrypt() and
+ decrypt(); calling either without an override raises NotImplementedError.
+ """
+
+ def __init__(self, json_serializer=None, json_deserializer=None) -> None:
+ self.json_serializer = json_serializer or self.default_json_serializer
+ self.json_deserializer = json_deserializer or self.default_json_deserializer
+
+ def default_json_serializer(self, data):
+ return json.dumps(data).encode("utf-8")
+
+ def default_json_deserializer(self, data):
+ return json.loads(data.decode("utf-8"))
+
+ def encrypt(self, data) -> str:
+ raise NotImplementedError("Subclasses must implement encrypt()")
+
+ def decrypt(self, data) -> Any:
+ raise NotImplementedError("Subclasses must implement decrypt()")
+
+ def mask(self, data) -> Any:
+ if isinstance(data, (str, dict, bytes)):
+ return DATA_MASKING_STRING
+ elif isinstance(data, (list, tuple, set)):
+ return type(data)([DATA_MASKING_STRING] * len(data))
+ return DATA_MASKING_STRING
diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py b/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py
new file mode 100644
index 00000000000..f257339d634
--- /dev/null
+++ b/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py
@@ -0,0 +1,5 @@
+from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider
+
+__all__ = [
+ "AwsEncryptionSdkProvider",
+]
diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py b/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py
new file mode 100644
index 00000000000..a895f8de0ac
--- /dev/null
+++ b/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py
@@ -0,0 +1,177 @@
+from __future__ import annotations
+
+import base64
+from typing import Any, Callable, Dict, List
+
+import botocore
+from aws_encryption_sdk import (
+ CachingCryptoMaterialsManager,
+ EncryptionSDKClient,
+ LocalCryptoMaterialsCache,
+ StrictAwsKmsMasterKeyProvider,
+)
+
+from aws_lambda_powertools.shared.user_agent import register_feature_to_botocore_session
+from aws_lambda_powertools.utilities._data_masking.constants import (
+ CACHE_CAPACITY,
+ MAX_CACHE_AGE_SECONDS,
+ MAX_MESSAGES_ENCRYPTED,
+)
+from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider
+
+
+class ContextMismatchError(Exception):
+ def __init__(self, key):
+ super().__init__(f"Encryption Context does not match expected value for key: {key}")
+ self.key = key
+
+
+class AwsEncryptionSdkProvider(BaseProvider):
+ """
+ The AwsEncryptionSdkProvider is used as a provider for the DataMasking class.
+
+ This provider allows you to perform data masking using the AWS Encryption SDK
+ for encryption and decryption. It integrates with the DataMasking class to
+ securely encrypt and decrypt sensitive data.
+
+ Usage Example:
+ ```
+ from aws_lambda_powertools.utilities.data_masking import DataMasking
+ from aws_lambda_powertools.utilities.data_masking.providers.kms.aws_encryption_sdk import (
+ AwsEncryptionSdkProvider,
+ )
+
+
+ def lambda_handler(event, context):
+ provider = AwsEncryptionSdkProvider(["arn:aws:kms:us-east-1:0123456789012:key/key-id"])
+ masker = DataMasking(provider=provider)
+
+ data = {
+ "project": "powertools",
+ "sensitive": "xxxxxxxxxx"
+ }
+
+ masked = masker.encrypt(data,fields=["sensitive"])
+
+ return masked
+
+ ```
+ """
+
+ def __init__(
+ self,
+ keys: List[str],
+ key_provider=None,
+ local_cache_capacity: int = CACHE_CAPACITY,
+ max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS,
+ max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED,
+ json_serializer: Callable | None = None,
+ json_deserializer: Callable | None = None,
+ ):
+ super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer)
+
+ self._key_provider = key_provider or KMSKeyProvider(
+ keys=keys,
+ local_cache_capacity=local_cache_capacity,
+ max_cache_age_seconds=max_cache_age_seconds,
+ max_messages_encrypted=max_messages_encrypted,
+ json_serializer=self.json_serializer,
+ json_deserializer=self.json_deserializer,
+ )
+
+ def encrypt(self, data: bytes | str | Dict | int, **provider_options) -> str:
+ return self._key_provider.encrypt(data=data, **provider_options)
+
+ def decrypt(self, data: str, **provider_options) -> Any:
+ return self._key_provider.decrypt(data=data, **provider_options)
+
+
+class KMSKeyProvider:
+
+ """
+ The KMSKeyProvider is responsible for assembling an AWS Key Management Service (KMS)
+ client, a caching mechanism, and a keyring for secure key management and data encryption.
+ """
+
+ def __init__(
+ self,
+ keys: List[str],
+ json_serializer: Callable,
+ json_deserializer: Callable,
+ local_cache_capacity: int = CACHE_CAPACITY,
+ max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS,
+ max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED,
+ ):
+ session = botocore.session.Session()
+ register_feature_to_botocore_session(session, "data-masking")
+
+ self.json_serializer = json_serializer
+ self.json_deserializer = json_deserializer
+ self.client = EncryptionSDKClient()
+ self.keys = keys
+ self.cache = LocalCryptoMaterialsCache(local_cache_capacity)
+ self.key_provider = StrictAwsKmsMasterKeyProvider(key_ids=self.keys, botocore_session=session)
+ self.cache_cmm = CachingCryptoMaterialsManager(
+ master_key_provider=self.key_provider,
+ cache=self.cache,
+ max_age=max_cache_age_seconds,
+ max_messages_encrypted=max_messages_encrypted,
+ )
+
+ def encrypt(self, data: bytes | str | Dict | float, **provider_options) -> str:
+ """
+ Encrypt data using the AwsEncryptionSdkProvider.
+
+ Parameters
+ -------
+ data : Union[bytes, str]
+ The data to be encrypted.
+ provider_options
+ Additional options for the aws_encryption_sdk.EncryptionSDKClient
+
+ Returns
+ -------
+ ciphertext : str
+ The encrypted data, as a base64-encoded string.
+ """
+ data_encoded = self.json_serializer(data)
+ ciphertext, _ = self.client.encrypt(
+ source=data_encoded,
+ materials_manager=self.cache_cmm,
+ **provider_options,
+ )
+ ciphertext = base64.b64encode(ciphertext).decode()
+ return ciphertext
+
+ def decrypt(self, data: str, **provider_options) -> Any:
+ """
+ Decrypt data using AwsEncryptionSdkProvider.
+
+ Parameters
+ -------
+ data : Union[bytes, str]
+ The encrypted data, as a base64-encoded string
+ provider_options
+ Additional options for the aws_encryption_sdk.EncryptionSDKClient
+
+ Returns
+ -------
+ plaintext : Any
+ The decrypted data, deserialized with the configured JSON deserializer
+ """
+ ciphertext_decoded = base64.b64decode(data)
+
+ expected_context = provider_options.pop("encryption_context", {})
+
+ ciphertext, decryptor_header = self.client.decrypt(
+ source=ciphertext_decoded,
+ key_provider=self.key_provider,
+ **provider_options,
+ )
+
+ for key, value in expected_context.items():
+ if decryptor_header.encryption_context.get(key) != value:
+ raise ContextMismatchError(key)
+
+ ciphertext = self.json_deserializer(ciphertext)
+ return ciphertext
diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py
index 99754266928..d245bc35f0d 100644
--- a/aws_lambda_powertools/utilities/data_classes/__init__.py
+++ b/aws_lambda_powertools/utilities/data_classes/__init__.py
@@ -27,7 +27,7 @@
from .ses_event import SESEvent
from .sns_event import SNSEvent
from .sqs_event import SQSEvent
-from .vpc_lattice import VPCLatticeEvent
+from .vpc_lattice import VPCLatticeEvent, VPCLatticeEventV2
__all__ = [
"APIGatewayProxyEvent",
@@ -56,4 +56,5 @@
"event_source",
"AWSConfigRuleEvent",
"VPCLatticeEvent",
+ "VPCLatticeEventV2",
]
diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py
index fa7c5296042..28229c21a62 100644
--- a/aws_lambda_powertools/utilities/data_classes/common.py
+++ b/aws_lambda_powertools/utilities/data_classes/common.py
@@ -189,7 +189,7 @@ def get_header_value(
default_value: str, optional
Default value if no value was found by name
case_sensitive: bool
- Whether to use a case-sensitive look up
+ Whether to use a case-sensitive look up. By default we make a case-insensitive lookup.
Returns
-------
str, optional
diff --git a/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py b/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py
index 35194f1f3f0..00ba5136eec 100644
--- a/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py
+++ b/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py
@@ -4,7 +4,7 @@
BaseHeadersSerializer,
HttpApiHeadersSerializer,
)
-from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent
+from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper
from aws_lambda_powertools.utilities.data_classes.shared_functions import (
base64_decode,
get_header_value,
@@ -12,7 +12,7 @@
)
-class VPCLatticeEvent(BaseProxyEvent):
+class VPCLatticeEventBase(BaseProxyEvent):
@property
def body(self) -> str:
"""The VPC Lattice body."""
@@ -30,11 +30,6 @@ def headers(self) -> Dict[str, str]:
"""The VPC Lattice event headers."""
return self["headers"]
- @property
- def is_base64_encoded(self) -> bool:
- """A boolean flag to indicate if the applicable request payload is Base64-encode"""
- return self["is_base64_encoded"]
-
@property
def decoded_body(self) -> str:
"""Dynamically base64 decode body as a str"""
@@ -48,24 +43,6 @@ def method(self) -> str:
"""The VPC Lattice method used. Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT."""
return self["method"]
- @property
- def query_string_parameters(self) -> Dict[str, str]:
- """The request query string parameters."""
- return self["query_string_parameters"]
-
- @property
- def raw_path(self) -> str:
- """The raw VPC Lattice request path."""
- return self["raw_path"]
-
- # VPCLattice event has no path field
- # Added here for consistency with the BaseProxyEvent class
- @property
- def path(self) -> str:
- return self["raw_path"]
-
- # VPCLattice event has no http_method field
- # Added here for consistency with the BaseProxyEvent class
@property
def http_method(self) -> str:
"""The HTTP method used. Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT."""
@@ -140,3 +117,137 @@ def get_header_value(
def header_serializer(self) -> BaseHeadersSerializer:
# When using the VPC Lattice integration, we have multiple HTTP Headers.
return HttpApiHeadersSerializer()
+
+
+class VPCLatticeEvent(VPCLatticeEventBase):
+ @property
+ def raw_path(self) -> str:
+ """The raw VPC Lattice request path."""
+ return self["raw_path"]
+
+ @property
+ def is_base64_encoded(self) -> bool:
+ """A boolean flag to indicate if the applicable request payload is Base64-encoded"""
+ return self["is_base64_encoded"]
+
+ # VPCLattice event has no path field
+ # Added here for consistency with the BaseProxyEvent class
+ @property
+ def path(self) -> str:
+ return self["raw_path"]
+
+ @property
+ def query_string_parameters(self) -> Dict[str, str]:
+ """The request query string parameters."""
+ return self["query_string_parameters"]
+
+
+class vpcLatticeEventV2Identity(DictWrapper):
+ @property
+ def source_vpc_arn(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext Identity sourceVpcArn"""
+ return self.get("sourceVpcArn")
+
+ @property
+ def get_type(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext Identity type"""
+ return self.get("type")
+
+ @property
+ def principal(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext principal"""
+ return self.get("principal")
+
+ @property
+ def principal_org_id(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext principalOrgID"""
+ return self.get("principalOrgID")
+
+ @property
+ def session_name(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext sessionName"""
+ return self.get("sessionName")
+
+ @property
+ def x509_subject_cn(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext X509SubjectCn"""
+ return self.get("X509SubjectCn")
+
+ @property
+ def x509_issuer_ou(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext X509IssuerOu"""
+ return self.get("X509IssuerOu")
+
+ @property
+ def x509_san_dns(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext X509SanDns"""
+ return self.get("x509SanDns")
+
+ @property
+ def x509_san_uri(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext X509SanUri"""
+ return self.get("X509SanUri")
+
+ @property
+ def x509_san_name_cn(self) -> Optional[str]:
+ """The VPC Lattice v2 Event requestContext X509SanNameCn"""
+ return self.get("X509SanNameCn")
+
+
+class vpcLatticeEventV2RequestContext(DictWrapper):
+ @property
+ def service_network_arn(self) -> str:
+ """The VPC Lattice v2 Event requestContext serviceNetworkArn"""
+ return self["serviceNetworkArn"]
+
+ @property
+ def service_arn(self) -> str:
+ """The VPC Lattice v2 Event requestContext serviceArn"""
+ return self["serviceArn"]
+
+ @property
+ def target_group_arn(self) -> str:
+ """The VPC Lattice v2 Event requestContext targetGroupArn"""
+ return self["targetGroupArn"]
+
+ @property
+ def identity(self) -> vpcLatticeEventV2Identity:
+ """The VPC Lattice v2 Event requestContext identity"""
+ return vpcLatticeEventV2Identity(self["identity"])
+
+ @property
+ def region(self) -> str:
+ """The VPC Lattice v2 Event requestContext region"""
+ return self["region"]
+
+ @property
+ def time_epoch(self) -> float:
+ """The VPC Lattice v2 Event requestContext timeEpoch"""
+ return self["timeEpoch"]
+
+
+class VPCLatticeEventV2(VPCLatticeEventBase):
+ @property
+ def version(self) -> str:
+ """The VPC Lattice v2 Event version"""
+ return self["version"]
+
+ @property
+ def is_base64_encoded(self) -> Optional[bool]:
+ """A boolean flag to indicate if the applicable request payload is Base64-encoded"""
+ return self.get("isBase64Encoded")
+
+ @property
+ def path(self) -> str:
+ """The VPC Lattice v2 Event path"""
+ return self["path"]
+
+ @property
+ def request_context(self) -> vpcLatticeEventV2RequestContext:
+ """The VPC Lattice v2 Event request context."""
+ return vpcLatticeEventV2RequestContext(self["requestContext"])
+
+ @property
+ def query_string_parameters(self) -> Optional[Dict[str, str]]:
+ """The request query string parameters."""
+ return self.get("queryStringParameters")
diff --git a/aws_lambda_powertools/utilities/parameters/appconfig.py b/aws_lambda_powertools/utilities/parameters/appconfig.py
index 87c6f5077b2..d5a9b7856e4 100644
--- a/aws_lambda_powertools/utilities/parameters/appconfig.py
+++ b/aws_lambda_powertools/utilities/parameters/appconfig.py
@@ -99,7 +99,8 @@ def __init__(
self.current_version = ""
self._next_token: Dict[str, str] = {} # nosec - token for get_latest_configuration executions
- self.last_returned_value = ""
+ # Dict to store the recently retrieved value for a specific configuration.
+ self.last_returned_value: Dict[str, str] = {}
def _get(self, name: str, **sdk_options) -> str:
"""
@@ -126,10 +127,14 @@ def _get(self, name: str, **sdk_options) -> str:
return_value = response["Configuration"].read()
self._next_token[name] = response["NextPollConfigurationToken"]
+ # The return of get_latest_configuration can be null because this value is supposed to be cached
+ # on the customer side.
+ # We created a dictionary that stores the most recently retrieved value for a specific configuration.
+ # See https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/appconfigdata/client/get_latest_configuration.html
if return_value:
- self.last_returned_value = return_value
+ self.last_returned_value[name] = return_value
- return self.last_returned_value
+ return self.last_returned_value[name]
def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]:
"""
diff --git a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py
index cbca982adf7..affffd98174 100644
--- a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py
+++ b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py
@@ -11,6 +11,7 @@
from .sns import SnsEnvelope, SnsSqsEnvelope
from .sqs import SqsEnvelope
from .vpc_lattice import VpcLatticeEnvelope
+from .vpc_latticev2 import VpcLatticeV2Envelope
__all__ = [
"ApiGatewayEnvelope",
@@ -27,4 +28,5 @@
"KafkaEnvelope",
"BaseEnvelope",
"VpcLatticeEnvelope",
+ "VpcLatticeV2Envelope",
]
diff --git a/aws_lambda_powertools/utilities/parser/envelopes/vpc_latticev2.py b/aws_lambda_powertools/utilities/parser/envelopes/vpc_latticev2.py
new file mode 100644
index 00000000000..77dbf2a4a24
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/envelopes/vpc_latticev2.py
@@ -0,0 +1,32 @@
+import logging
+from typing import Any, Dict, Optional, Type, Union
+
+from ..models import VpcLatticeV2Model
+from ..types import Model
+from .base import BaseEnvelope
+
+logger = logging.getLogger(__name__)
+
+
+class VpcLatticeV2Envelope(BaseEnvelope):
+ """Amazon VPC Lattice envelope to extract data within body key"""
+
+ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Optional[Model]:
+ """Parses data found with model provided
+
+ Parameters
+ ----------
+ data : Dict
+ Lambda event to be parsed
+ model : Type[Model]
+ Data model provided to parse after extracting data using envelope
+
+ Returns
+ -------
+ Optional[Model]
+ Parsed detail payload with model provided
+ """
+ logger.debug(f"Parsing incoming data with VPC Lattice V2 model {VpcLatticeV2Model}")
+ parsed_envelope: VpcLatticeV2Model = VpcLatticeV2Model.parse_obj(data)
+ logger.debug(f"Parsing event payload in `detail` with {model}")
+ return self._parse(data=parsed_envelope.body, model=model)
diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py
index f1b2d30d9cf..3c707fda61e 100644
--- a/aws_lambda_powertools/utilities/parser/models/__init__.py
+++ b/aws_lambda_powertools/utilities/parser/models/__init__.py
@@ -89,6 +89,7 @@
from .sns import SnsModel, SnsNotificationModel, SnsRecordModel
from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel
from .vpc_lattice import VpcLatticeModel
+from .vpc_latticev2 import VpcLatticeV2Model
__all__ = [
"APIGatewayProxyEventV2Model",
@@ -163,4 +164,5 @@
"CloudFormationCustomResourceCreateModel",
"CloudFormationCustomResourceBaseModel",
"VpcLatticeModel",
+ "VpcLatticeV2Model",
]
diff --git a/aws_lambda_powertools/utilities/parser/models/vpc_latticev2.py b/aws_lambda_powertools/utilities/parser/models/vpc_latticev2.py
new file mode 100644
index 00000000000..dc764684484
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/models/vpc_latticev2.py
@@ -0,0 +1,42 @@
+from datetime import datetime
+from typing import Dict, Optional, Type, Union
+
+from pydantic import BaseModel, Field, validator
+
+
+class VpcLatticeV2RequestContextIdentity(BaseModel):
+ source_vpc_arn: Optional[str] = Field(None, alias="sourceVpcArn")
+ get_type: Optional[str] = Field(None, alias="type")
+ principal: Optional[str] = Field(None, alias="principal")
+ principal_org_id: Optional[str] = Field(None, alias="principalOrgID")
+ session_name: Optional[str] = Field(None, alias="sessionName")
+ x509_subject_cn: Optional[str] = Field(None, alias="X509SubjectCn")
+ x509_issuer_ou: Optional[str] = Field(None, alias="X509IssuerOu")
+ x509_san_dns: Optional[str] = Field(None, alias="x509SanDns")
+ x509_san_uri: Optional[str] = Field(None, alias="X509SanUri")
+ x509_san_name_cn: Optional[str] = Field(None, alias="X509SanNameCn")
+
+
+class VpcLatticeV2RequestContext(BaseModel):
+ service_network_arn: str = Field(alias="serviceNetworkArn")
+ service_arn: str = Field(alias="serviceArn")
+ target_group_arn: str = Field(alias="targetGroupArn")
+ identity: VpcLatticeV2RequestContextIdentity
+ region: str
+ time_epoch: float = Field(alias="timeEpoch")
+ time_epoch_as_datetime: datetime = Field(alias="timeEpoch")
+
+ @validator("time_epoch_as_datetime", pre=True, allow_reuse=True)
+ def time_epoch_convert_to_miliseconds(cls, value: int):
+ return round(int(value) / 1000)
+
+
+class VpcLatticeV2Model(BaseModel):
+ version: str
+ path: str
+ method: str
+ headers: Dict[str, str]
+ query_string_parameters: Optional[Dict[str, str]] = Field(None, alias="queryStringParameters")
+ body: Optional[Union[str, Type[BaseModel]]] = None
+ is_base64_encoded: Optional[bool] = Field(None, alias="isBase64Encoded")
+ request_context: VpcLatticeV2RequestContext = Field(None, alias="requestContext")
diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py
index 7e2d69e429c..a45e4880b18 100644
--- a/aws_lambda_powertools/utilities/parser/parser.py
+++ b/aws_lambda_powertools/utilities/parser/parser.py
@@ -1,4 +1,5 @@
import logging
+import typing
from typing import Any, Callable, Dict, Optional, Type, overload
from aws_lambda_powertools.utilities.parser.compat import disable_pydantic_v2_warning
@@ -17,7 +18,7 @@ def event_parser(
handler: Callable[[Any, LambdaContext], EventParserReturnType],
event: Dict[str, Any],
context: LambdaContext,
- model: Type[Model],
+ model: Optional[Type[Model]] = None,
envelope: Optional[Type[Envelope]] = None,
) -> EventParserReturnType:
"""Lambda handler decorator to parse & validate events using Pydantic models
@@ -76,10 +77,22 @@ def handler(event: Order, context: LambdaContext):
ValidationError
When input event does not conform with model provided
InvalidModelTypeError
- When model given does not implement BaseModel
+ When model given does not implement BaseModel or is not provided
InvalidEnvelopeError
When envelope given does not implement BaseEnvelope
"""
+
+ # The first parameter of a Lambda function is always the event
+ # This line get the model informed in the event_parser function
+ # or the first parameter of the function by using typing.get_type_hints
+ type_hints = typing.get_type_hints(handler)
+ model = model or (list(type_hints.values())[0] if type_hints else None)
+ if model is None:
+ raise InvalidModelTypeError(
+ "The model must be provided either as the `model` argument to `event_parser` "
+ "or as the type hint of `event` in the handler that it wraps",
+ )
+
parsed_event = parse(event=event, model=model, envelope=envelope) if envelope else parse(event=event, model=model)
logger.debug(f"Calling handler {handler.__name__}")
return handler(parsed_event, context)
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 36cd6409160..f81d8c6bd2d 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -1,5 +1,5 @@
# v9.1.18
-FROM squidfunk/mkdocs-material@sha256:b41ba6dba2047d995bb89bf5a9a01e7eca05d2d8bd34d42028fa85399c825766
+FROM squidfunk/mkdocs-material@sha256:cb38dc2e1094228cad571eb0c9c6f0ec760adfa4c3e6112fda57e8b62ae18592
# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
COPY requirements.txt /tmp/
RUN pip install --require-hashes -r /tmp/requirements.txt
diff --git a/docs/contributing/conventions.md b/docs/contributing/conventions.md
new file mode 100644
index 00000000000..23db25b9ad7
--- /dev/null
+++ b/docs/contributing/conventions.md
@@ -0,0 +1,34 @@
+---
+title: Conventions
+description: General conventions and practices that are applicable throughout Powertools for AWS Lambda (Python)
+---
+
+
+
+## General terminology and practices
+
+These are common conventions we keep on building as the project gains new contributors and grows in complexity.
+
+As we gather more concrete examples, this page will have one section for each category to demonstrate a before and after.
+
+| Category | Convention |
+| --------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| **Docstring** | We use [Numpy convention](https://numpydoc.readthedocs.io/en/latest/format.html){target="_blank"} with markdown to help generate more readable API references. For public APIs, we always include at least one **Example** to ease everyone's experience when using an IDE. |
+| **Style guide** | We use black and [Ruff](https://beta.ruff.rs/docs/) to enforce beyond good practices [PEP8](https://pep8.org/). We use type annotations and enforce static type checking at CI (mypy). |
+| **Core utilities** | Core utilities always accept `service` as a constructor parameter, can work in isolation, and are also available in other language implementations. |
+| **Utilities** | Utilities are not as strict as core and focus on community needs: development productivity, industry leading practices, etc. Both core and general utilities follow our [Tenets](https://docs.powertools.aws.dev/lambda/python/#tenets). |
+| **Exceptions** | Specific exceptions live within utilities themselves and use `Error` suffix e.g. `MetricUnitError`. |
+| **Git commits** | We follow [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/). We do not enforce conventional commits on contributors to lower the entry bar. Instead, we enforce a conventional PR title so our label automation and changelog are generated correctly. |
+| **API documentation** | API reference docs are generated from docstrings which should have Examples section to allow developers to have what they need within their own IDE. Documentation website covers the wider usage, tips, and strive to be concise. |
+| **Documentation** | We treat it like a product. We sub-divide content aimed at getting started (80% of customers) vs advanced usage (20%). We also ensure customers know how to unit test their code when using our features. |
+
+## Testing definition
+
+We group tests in different categories
+
+| Test | When to write | Notes | Speed |
+| ----------------- | ----------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------- |
+| Unit tests | Verify the smallest possible unit works. | Networking access is prohibited. Prefer Functional tests given our complexity. | Lightning fast (nsec to ms) |
+| Functional tests | Guarantee functionality works as expected. It's a subset of integration test covering multiple units. | No external dependency. Prefer Fake implementations (in-memory) over Mocks and Stubs. | Fast (ms to few seconds at worst) |
+| End-to-end tests | Gain confidence that a Lambda function with our code operates as expected. | It simulates how customers configure, deploy, and run their Lambda function - Event Source configuration, IAM permissions, etc. | Slow (minutes) |
+| Performance tests | Ensure critical operations won't increase latency and costs to customers. | CI arbitrary hardware can make it flaky. We'll resume writing perf test after we revamp our functional tests with internal utilities. | Fast to moderate (a few seconds to a few minutes) |
diff --git a/docs/contributing/documentation/rfcs.md b/docs/contributing/documentation/rfcs.md
new file mode 100644
index 00000000000..4a9f925adbb
--- /dev/null
+++ b/docs/contributing/documentation/rfcs.md
@@ -0,0 +1,10 @@
+---
+title: Writing Request For Comment (RFC)
+description: Contributing RFCs to Powertools for AWS Lambda (Python)
+---
+
+
+
+## TBW
+
+Something great will come.
diff --git a/docs/contributing/getting_started.md b/docs/contributing/getting_started.md
new file mode 100644
index 00000000000..3cdcc6b1ddc
--- /dev/null
+++ b/docs/contributing/getting_started.md
@@ -0,0 +1,104 @@
+---
+title: Your first contribution
+description: All you need to know for your first contribution to Powertools for AWS Lambda (Python)
+---
+
+
+
+Thank you for your interest in contributing to our project - we couldn't be more excited!
+
+
+```mermaid
+graph LR
+ Learn["Learn about contributions"] --> Find["Find areas to work / get mentoring"] --> Work["Prepare pull request"] --> Closing["Take learnings with you"]
+```
+End-to-end process
+
+
+## Types of contributions
+
+We consider any contribution that helps this project improve everyone's experience to be valid, as long as you agree with our [tenets](../index.md#tenets){target="_blank"}, [licensing](../../LICENSE){target="_blank"}, and [Code of Conduct](#code-of-conduct).
+
+Whether you're a new contributor or a pro, we compiled a list of the common contributions to help you choose your first:
+
+!!! info "Please check [existing open](https://github.com/aws-powertools/powertools-lambda-python/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc){target='_blank'}, or [recently closed](https://github.com/aws-powertools/powertools-lambda-python/issues?q=is%3Aissue+sort%3Aupdated-desc+is%3Aclosed){target='_blank'} issues before creating a new one."
+ Each type link goes to their respective template, or Discord invite.
+
+| Type | Description |
+| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| [Documentation](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=documentation%2Ctriage&projects=&template=documentation_improvements.yml&title=Docs%3A+TITLE){target="_blank" rel="nofollow"} | Ideas to make the user guide or API guide clearer. These generally range from typos, diagrams, and tutorials to the lack of documentation, etc. |
+| [Feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE){target="_blank" rel="nofollow"} | New functionalities or enhancements that could help you, your team, existing and future customers. Check out our [process to understand how we prioritize it](../roadmap.md#process){target="_blank"}. |
+| [Design proposals](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=RFC%2Ctriage&projects=&template=rfc.yml&title=RFC%3A+TITLE){target="_blank" rel="nofollow"} | Request for Comments (RFC) including user experience (UX) based on a feature request to gather the community feedback, and demonstrate the art of the possible. |
+| [Bug report](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=bug%2Ctriage&projects=&template=bug_report.yml&title=Bug%3A+TITLE){target="_blank" rel="nofollow"} | A runtime error that is reproducible whether you have an idea how to solve it or not. |
+| [Advocacy](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=community-content&projects=&template=share_your_work.yml&title=%5BI+Made+This%5D%3A+%3CTITLE%3E){target="_blank" rel="nofollow"} | Share what you did with Powertools for AWS Lambda. Blog posts, workshops, presentation, sample applications, podcasts, etc. |
+| [Public reference](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=customer-reference&projects=&template=support_powertools.yml&title=%5BSupport+Powertools+for+AWS+Lambda+%28Python%29%5D%3A+%3Cyour+organization+name%3E){target="_blank" rel="nofollow"} | Become a public reference to share how you're using Powertools for AWS Lambda at your organization. |
+| [Discussions](https://discord.gg/B8zZKbbyET){target="_blank" rel="nofollow"} | Kick off a discussion on Discord, introduce yourself, and help respond to existing questions from the community. |
+| [Static typing](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=typing%2Ctriage&projects=&template=static_typing.yml&title=Static+typing%3A+TITLE){target="_blank" rel="nofollow"} | Improvements to increase or correct static typing coverage to ease maintenance, autocompletion, etc. |
+| [Technical debt](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=tech-debt%2Ctriage&projects=&template=tech_debt.yml&title=Tech+debt%3A+TITLE){target="_blank" rel="nofollow"} | Suggest areas to address technical debt that could make maintenance easier or provide customer value faster. Generally used by maintainers and contributors. |
+| [Governance](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=internal%2Ctriage&projects=&template=maintenance.yml&title=Maintenance%3A+TITLE){target="_blank" rel="nofollow"} | Ideas to improve to our governance processes, automation, and anything internal. Typically used by maintainers and regular contributors. |
+
+## Finding contributions to work on
+
+[Besides suggesting ideas](#types-of-contributions) you think it'll improve everyone's experience, these are the most common places to find work:
+
+| Area | Description |
+| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| [Help wanted issues](https://github.com/aws-powertools/powertools-lambda-python/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc+label%3A%22help+wanted%22+){target="_blank" rel="nofollow"} | These are triaged areas that we'd appreciate any level of contribution - from opinions to actual implementation. |
+| [Missing customer feedback issues](https://github.com/aws-powertools/powertools-lambda-python/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc+label%3Aneed-customer-feedback){target="_blank" rel="nofollow"} | These are items we'd like to hear from more customers before making any decision. Sharing your thoughts, use case, or asking additional questions are great help. |
+| [Pending design proposals](https://github.com/aws-powertools/powertools-lambda-python/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc+label%3ARFC){target="_blank" rel="nofollow"} | These are feature requests that initially look good but need a RFC to enrich the discussion by validating user-experience, tradeoffs, and highlight use cases. |
+| [Backlog items](https://github.com/orgs/aws-powertools/projects/3/views/3?query=is%3Aopen+sort%3Aupdated-desc){target="_blank" rel="nofollow"} | We use GitHub projects to surface what we're working on, needs triage, etc. This view shows items we already triaged but don't have the bandwidth to tackle them just yet. |
+| [Documentation](https://docs.powertools.aws.dev/lambda/python/latest/){target="_blank"} | Documentation can always be improved. Look for areas where a better example, a diagram, or more context would help everyone - keep in mind a diverse audience and English as a second language folks. |
+| [Participate in discussions](https://discord.gg/B8zZKbbyET){target="_blank" rel="nofollow"} | There's always a discussion that could benefit others in the form of documentation, blog post, etc. |
+| [Roadmap](../roadmap.md){target="_blank"} | Some roadmap items need a RFC to discuss design options, or gather customers use case before we can prioritize it. |
+| Build a sample application | Using Powertools for AWS Lambda in different contexts will give you insights on what could be made easier, which documentation could be enriched, and more. |
+
+!!! question "Still couldn't find anything that matches your skill set?"
+ Please reach out on [Discord](https://discord.gg/B8zZKbbyET){target="_blank" rel="nofollow"}, especially if you'd like to get mentoring for a task you'd like to take but you don't feel ready yet :)
+
+ Contributions are meant to be bi-directional. There's always something we can learn from each other.
+
+## Sending a pull request
+
+!!! note "First time creating a Pull Request? Keep [this document handy.](https://help.github.com/articles/creating-a-pull-request/){target='blank' rel='nofollow'}"
+
+Before sending us a pull request, please ensure that:
+
+* [ ] You are working against the latest source on the **develop** branch.
+* [ ] You check existing [open, and recently merged](https://github.com/aws-powertools/powertools-lambda-python/pulls?q=is%3Apr+is%3Aopen%2Cmerged+sort%3Aupdated-desc){target="_blank" rel="nofollow"} pull requests to make sure someone else hasn't addressed the problem already.
+* [ ] You open an [issue](https://github.com/aws-powertools/powertools-lambda-python/issues/new/choose){target="_blank" rel="nofollow"} before you begin any implementation. We value your time and bandwidth. As such, any pull requests created on non-triaged issues might not be successful.
+* [ ] Create a new branch named after the change you are contributing _e.g._ `feat/logger-debug-sampling`
+
+**Ready?**
+
+These are the steps to send a pull request:
+
+1. Run all formatting, linting, tests, documentation and baseline checks: `make pr`
+2. Commit to your fork using clear commit messages. Don't worry about typos or format, we squash all commits during merge.
+3. Send us a pull request with a [conventional semantic title](https://github.com/aws-powertools/powertools-lambda-python/pull/67).
+4. Fill in the areas pre-defined in the pull request body to help expedite reviewing your work.
+5. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
+
+## Code of Conduct
+
+!!! info "This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct){target='_blank'}"
+
+For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
+opensource-codeofconduct@amazon.com with any additional questions or comments.
+
+## Security issue notifications
+
+If you discover a potential security issue in this project, we kindly ask you to notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
+
+## Troubleshooting
+
+### API reference documentation
+
+When you are working on the codebase and you use the local API reference documentation to preview your changes, you might see the following message: `Module aws_lambda_powertools not found`.
+
+This happens when:
+
+* You did not install the local dev environment yet
+ * You can install dev deps with `make dev` command
+* The code in the repository is raising an exception while the `pdoc` is scanning the codebase
+ * Unfortunately, this exception is not shown to you, but if you run `poetry run pdoc --pdf aws_lambda_powertools`, the exception is shown and you can prevent the exception from being raised
+ * Once resolved the documentation should load correctly again
diff --git a/docs/contributing/setup.md b/docs/contributing/setup.md
new file mode 100644
index 00000000000..50533fad4b6
--- /dev/null
+++ b/docs/contributing/setup.md
@@ -0,0 +1,67 @@
+---
+title: Development environment
+description: Setting up your development environment for contribution
+---
+
+
+
+[](https://discord.gg/B8zZKbbyET){target="_blank" rel="nofollow"}
+
+This page describes how to setup your development environment (Cloud or locally) to contribute to Powertools for AWS Lambda.
+
+
+
+## Requirements
+
+!!! question "First time contributing to an open-source project ever?"
+ Read this [introduction on how to fork and clone a project on GitHub](https://docs.github.com/en/get-started/quickstart/contributing-to-projects){target="_blank" rel="nofollow"}.
+
+Unless you're using the pre-configured Cloud environment, you'll need the following installed:
+
+* [GitHub account](https://github.com/join){target="_blank" rel="nofollow"}. You'll need to be able to fork, clone, and contribute via pull request.
+* [Python 3.8+](https://www.python.org/downloads/){target="_blank" rel="nofollow"}. Pick any version supported in [AWS Lambda runtime](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html).
+* [Docker](https://docs.docker.com/engine/install/){target="_blank" rel="nofollow"}. We use it to run documentation linters and non-Python tooling.
+* [Fork the repository](https://github.com/aws-powertools/powertools-lambda-python/fork). You'll work against your fork of this repository.
+
+??? note "Additional requirements if running end-to-end tests"
+
+ * [AWS CDK CLI](https://docs.aws.amazon.com/cdk/v2/guide/getting_started.html#getting_started_prerequisites){target="_blank"}
+ * [AWS Account bootstrapped with CDK](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html){target="_blank"}
+ * [AWS CLI installed and configured](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html)
+
+## Cloud environment
+
+> **NOTE**. Be mindful of [Gitpod pricing structure](https://www.gitpod.io/pricing){target="_blank" rel="nofollow"} for long-running contributions. When in doubt, use the local environment below.
+
+To use a pre-configured environment, replace `YOUR_USERNAME` with your GitHub username or organization.
+
+```bash
+https://gitpod.io/#https://github.com/YOUR_USERNAME/powertools-lambda-python #(1)!
+```
+
+1. For example, my username is `heitorlessa`.
+ Therefore, my final URL should be `https://gitpod.io/#https://github.com/heitorlessa/powertools-lambda-python`
+
+Once provisioned, it'll install all development dependencies and tools you'll need to contribute.
+
+## Local environment
+
+> Assuming you've got all [requirements](#requirements).
+
+You can use `make dev` to create a local virtual environment and install all dependencies locally.
+
+!!! note "Curious about what `make dev` does under the hood?"
+ We use `Make` to [automate common tasks](https://github.com/aws-powertools/powertools-lambda-python/blob/1ebe3275a5c53aed5a8eb76318e7d0af2367edfa/Makefile#L7){target="_blank" rel="nofollow"} locally and in Continuous Integration environments.
+
+## Local documentation
+
+You might find it useful to run both the documentation website and the API reference locally while contributing:
+
+* **Docs website**: `make docs-local`
+ * If you prefer using Docker: `make docs-local-docker`
+* **API reference**: `make docs-api-local`
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
index 4e4e935f699..5ae15a9df2e 100644
--- a/docs/core/event_handler/api_gateway.md
+++ b/docs/core/event_handler/api_gateway.md
@@ -122,15 +122,27 @@ When using [AWS Lambda Function URL](https://docs.aws.amazon.com/lambda/latest/d
#### VPC Lattice
-When using [VPC Lattice with AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/services-vpc-lattice.html){target="_blank"}, you can use `VPCLatticeResolver`.
+When using [VPC Lattice with AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/services-vpc-lattice.html){target="_blank"}, you can use `VPCLatticeV2Resolver`.
-=== "getting_started_vpclattice_resolver.py"
+=== "Payload v2 (Recommended)"
+
+ ```python hl_lines="5 11" title="Using VPC Lattice resolver"
+ --8<-- "examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.py"
+ ```
+
+=== "Payload v2 (Recommended) - Sample Event"
+
+ ```json hl_lines="2 3" title="Example payload delivered to the handler"
+ --8<-- "examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.json"
+ ```
+
+=== "Payload v1"
```python hl_lines="5 11" title="Using VPC Lattice resolver"
--8<-- "examples/event_handler_rest/src/getting_started_vpclattice_resolver.py"
```
-=== "getting_started_vpclattice_resolver.json"
+=== "Payload v1 - Sample Event"
```json hl_lines="2 3" title="Example payload delivered to the handler"
--8<-- "examples/event_handler_rest/src/getting_started_vpclattice_resolver.json"
@@ -227,7 +239,7 @@ You can access the raw payload via `body` property, or if it's a JSON string you
#### Headers
-Similarly to [Query strings](#query-strings-and-payload), you can access headers as dictionary via `app.current_event.headers`, or by name via `get_header_value`.
+Similarly to [Query strings](#query-strings-and-payload), you can access headers as dictionary via `app.current_event.headers`, or by name via `get_header_value`. If you prefer a case-insensitive lookup of the header value, the `app.current_event.get_header_value` function automatically handles it.
```python hl_lines="19" title="Accessing HTTP Headers"
--8<-- "examples/event_handler_rest/src/accessing_request_details_headers.py"
diff --git a/docs/core/logger.md b/docs/core/logger.md
index 6fc48e8898f..9e6ff074c43 100644
--- a/docs/core/logger.md
+++ b/docs/core/logger.md
@@ -39,7 +39,7 @@ Your Logger will include the following keys to your structured logging:
| **level**: `str` | `INFO` | Logging level |
| **location**: `str` | `collect.handler:1` | Source code location where statement was executed |
| **message**: `Any` | `Collecting payment` | Unserializable JSON values are casted as `str` |
-| **timestamp**: `str` | `2021-05-03 10:20:19,650+0200` | Timestamp with milliseconds, by default uses local timezone |
+| **timestamp**: `str` | `2021-05-03 10:20:19,650+0000` | Timestamp with milliseconds, by default uses default AWS Lambda timezone (UTC) |
| **service**: `str` | `payment` | Service name defined, by default `service_undefined` |
| **xray_trace_id**: `str` | `1-5759e988-bd862e3fe1be46a994272793` | When [tracing is enabled](https://docs.aws.amazon.com/lambda/latest/dg/services-xray.html){target="_blank"}, it shows X-Ray Trace ID |
| **sampling_rate**: `float` | `0.1` | When enabled, it shows sampling rate in percentage e.g. 10% |
@@ -319,14 +319,30 @@ Logger can optionally log uncaught exceptions by setting `log_uncaught_exception
--8<-- "examples/logger/src/logging_uncaught_exceptions_output.json"
```
+#### Stack trace logging
+
+By default, the Logger will automatically include the full stack trace in JSON format when using `logger.exception`. If you want to disable this feature, set `serialize_stacktrace=False` during initialization.
+
+=== "logging_stacktrace.py"
+
+ ```python hl_lines="7 15"
+ --8<-- "examples/logger/src/logging_stacktrace.py"
+ ```
+
+=== "logging_stacktrace_output.json"
+
+ ```json hl_lines="9-27"
+ --8<-- "examples/logger/src/logging_stacktrace_output.json"
+ ```
+
### Date formatting
-Logger uses Python's standard logging date format with the addition of timezone: `2021-05-03 11:47:12,494+0200`.
+Logger uses Python's standard logging date format with the addition of timezone: `2021-05-03 11:47:12,494+0000`.
You can easily change the date format using one of the following parameters:
* **`datefmt`**. You can pass any [strftime format codes](https://strftime.org/){target="_blank" rel="nofollow"}. Use `%F` if you need milliseconds.
-* **`use_rfc3339`**. This flag will use a format compliant with both RFC3339 and ISO8601: `2022-10-27T16:27:43.738+02:00`
+* **`use_rfc3339`**. This flag will use a format compliant with both RFC3339 and ISO8601: `2022-10-27T16:27:43.738+00:00`
???+ tip "Prefer using [datetime string formats](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes){target="_blank" rel="nofollow"}?"
Use `use_datetime_directive` flag along with `datefmt` to instruct Logger to use `datetime` instead of `time.strftime`.
@@ -352,6 +368,7 @@ The following environment variables are available to configure Logger at a globa
| **Event Logging** | Whether to log the incoming event. | `POWERTOOLS_LOGGER_LOG_EVENT` | `false` |
| **Debug Sample Rate** | Sets the debug log sampling. | `POWERTOOLS_LOGGER_SAMPLE_RATE` | `0` |
| **Disable Deduplication** | Disables log deduplication filter protection to use Pytest Live Log feature. | `POWERTOOLS_LOG_DEDUPLICATION_DISABLED` | `false` |
+| **TZ** | Sets timezone when using Logger, e.g., `US/Eastern`. Timezone is defaulted to UTC when `TZ` is not set | `TZ` | `None` (UTC) |
[`POWERTOOLS_LOGGER_LOG_EVENT`](#logging-incoming-event) can also be set on a per-method basis, and [`POWERTOOLS_LOGGER_SAMPLE_RATE`](#sampling-debug-logs) on a per-instance basis. These parameter values will override the environment variable value.
@@ -448,7 +465,7 @@ If you prefer configuring it separately, or you'd want to bring this JSON Format
| **`json_default`** | function to coerce unserializable values, when no custom serializer/deserializer is set | `str` |
| **`datefmt`** | string directives (strftime) to format log timestamp | `%Y-%m-%d %H:%M:%S,%F%z`, where `%F` is a custom ms directive |
| **`use_datetime_directive`** | format the `datefmt` timestamps using `datetime`, not `time` (also supports the custom `%F` directive for milliseconds) | `False` |
-| **`utc`** | set logging timestamp to UTC | `False` |
+| **`utc`** | enforce logging timestamp to UTC (ignore `TZ` environment variable) | `False` |
| **`log_record_order`** | set order of log keys when logging | `["level", "location", "message", "timestamp"]` |
| **`kwargs`** | key-value to be included in log messages | `None` |
@@ -567,17 +584,26 @@ You can change the order of [standard Logger keys](#standard-structured-keys) or
--8<-- "examples/logger/src/reordering_log_keys_output.json"
```
-#### Setting timestamp to UTC
+#### Setting timestamp to custom Timezone
+
+By default, this Logger and the standard logging library emit records with the default AWS Lambda timestamp in **UTC**.
+
+
+If you prefer to log in a specific timezone, you can configure it by setting the `TZ` environment variable. You can do this either as an AWS Lambda environment variable or directly within your Lambda function settings. [Click here](https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html#configuration-envvars-runtime){target="_blank"} for a comprehensive list of available Lambda environment variables.
+
-By default, this Logger and standard logging library emits records using local time timestamp. You can override this behavior via `utc` parameter:
+???+ tip
+ `TZ` environment variable will be ignored if `utc` is set to `True`
-=== "setting_utc_timestamp.py"
+=== "setting_custom_timezone.py"
- ```python hl_lines="6"
+ ```python hl_lines="9 12"
--8<-- "examples/logger/src/setting_utc_timestamp.py"
```
-=== "setting_utc_timestamp_output.json"
+ 1. if you set TZ in your Lambda function, `time.tzset()` needs to be called. You don't need it when setting TZ in AWS Lambda environment variables
+
+=== "setting_custom_timezone_output.json"
```json hl_lines="6 13"
--8<-- "examples/logger/src/setting_utc_timestamp_output.json"
diff --git a/docs/index.md b/docs/index.md
index dd0a74e5389..6ff5a3fa5c4 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -26,8 +26,8 @@ Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverles
You can install Powertools for AWS Lambda (Python) using one of the following options:
-* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
-* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
+* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
+* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard:
* **Pip**: **[`pip install "aws-lambda-powertools"`](#){: .copyMe}:clipboard:**
!!! question "Looking for Pip signed releases? [Learn more about verifying signed builds](./security.md#verifying-signed-builds)"
@@ -80,61 +80,66 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
| Region | Layer ARN |
| ---------------- | ---------------------------------------------------------------------------------------------------------- |
- | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:4](#){: .copyMe}:clipboard: |
- | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
- | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:44](#){: .copyMe}:clipboard: |
+ | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
+ | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:45](#){: .copyMe}:clipboard: |
=== "arm64"
| Region | Layer ARN |
| ---------------- | ---------------------------------------------------------------------------------------------------------------- |
- | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
- | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44](#){: .copyMe}:clipboard: |
+ | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
+ | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45](#){: .copyMe}:clipboard: |
??? note "Note: Click to expand and copy code snippets for popular frameworks"
@@ -147,7 +152,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
Type: AWS::Serverless::Function
Properties:
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45
```
=== "Serverless framework"
@@ -157,7 +162,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
hello:
handler: lambda_function.lambda_handler
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45
```
=== "CDK"
@@ -173,7 +178,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -222,7 +227,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
}
@@ -275,7 +280,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45
❯ amplify push -y
@@ -286,7 +291,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:44
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:45
? Do you want to edit the local lambda function now? No
```
@@ -300,7 +305,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
Properties:
Architectures: [arm64]
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45
```
=== "Serverless framework"
@@ -311,7 +316,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
handler: lambda_function.lambda_handler
architecture: arm64
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45
```
=== "CDK"
@@ -327,7 +332,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -377,7 +382,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45"]
architectures = ["arm64"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
@@ -433,7 +438,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45
❯ amplify push -y
@@ -444,7 +449,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:44
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:45
? Do you want to edit the local lambda function now? No
```
@@ -452,7 +457,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd
Change {region} to your AWS region, e.g. `eu-west-1`
```bash title="AWS CLI"
- aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44 --region {region}
+ aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45 --region {region}
```
The pre-signed URL to download this Lambda Layer will be within `Location` key.
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 4862eca9579..c35bc5bc713 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -16,9 +16,9 @@ gitdb==4.0.10 \
--hash=sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a \
--hash=sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7
# via gitpython
-gitpython==3.1.35 \
- --hash=sha256:9cbefbd1789a5fe9bcf621bb34d3f441f3a90c8461d377f84eda73e721d9b06b \
- --hash=sha256:c19b4292d7a1d3c0f653858db273ff8a6614100d1eb1528b014ec97286193c09
+gitpython==3.1.37 \
+ --hash=sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33 \
+ --hash=sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54
# via mkdocs-git-revision-date-plugin
jinja2==3.1.2 \
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
diff --git a/docs/roadmap.md b/docs/roadmap.md
index c2de6829fb4..e42fae21c97 100644
--- a/docs/roadmap.md
+++ b/docs/roadmap.md
@@ -36,8 +36,9 @@ Data Masking will be a new utility to mask/unmask sensitive data using encryptio
**Major updates**
- [x] [RFC to agree on design and MVP](https://github.com/aws-powertools/powertools-lambda-python/issues/1858)
-- [ ] [POC with AWS KMS as the default provider](https://github.com/aws-powertools/powertools-lambda-python/pull/2197)
-- [ ] Documentation to guide customers how to bring their own provider (e.g., `ItsDangerous`)
+- [x] [POC with AWS KMS as the default provider](https://github.com/aws-powertools/powertools-lambda-python/pull/2197)
+- [ ] User-guide documentation and include when not to use it (e.g., when to use SNS data policy, CloudWatch Logs data policy)
+- [ ] Decide whether to use Encryption SDK to bring their own provider or simply a contract (e.g., `ItsDangerous`)
### Revamp Event Handler
@@ -49,8 +50,8 @@ Based on customers feedback, we want to provide middleware authoring support for
- [x] [Agree on experience for middleware support](https://github.com/aws-powertools/powertools-lambda-python/issues/953#issuecomment-1450223155)
- [x] [RFC to outline initial thoughts on OpenAPI integration](https://github.com/aws-powertools/powertools-lambda-python/issues/2421)
-- [ ] MVP for REST middleware
-- [ ] MVP for OpenAPI and SwaggerUI
+- [x] [MVP for REST middleware](./core/event_handler/api_gateway.md#middleware)
+- [ ] [MVP for OpenAPI and SwaggerUI](https://github.com/aws-powertools/powertools-lambda-python/pull/3109)
- [ ] [MVP for AppSync Batch invoke and partial failure support](https://github.com/aws-powertools/powertools-lambda-python/pull/1998)
### Lambda Layer in release notes
@@ -112,10 +113,10 @@ We want to investigate security and scaling requirements for these special regio
**Major updates**
-- [ ] Gather agencies and customers name to prioritize it
-- [ ] Investigate security requirements for special regions
-- [ ] Update CDK Layer construct to include regions
+- [x] Gather agencies and customers name to prioritize it
+- [x] Investigate security requirements for special regions
- [ ] Create additional infrastructure for special regions
+- [ ] Update CDK Layer construct to include regions
### V3
diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md
index fd4a176f631..7cc966313fb 100644
--- a/docs/utilities/data_classes.md
+++ b/docs/utilities/data_classes.md
@@ -103,7 +103,8 @@ Log Data Event for Troubleshooting
| [SES](#ses) | `SESEvent` |
| [SNS](#sns) | `SNSEvent` |
| [SQS](#sqs) | `SQSEvent` |
-| [VPC Lattice](#vpc-lattice) | `VPCLatticeEvent` |
+| [VPC Lattice V2](#vpc-lattice-v2) | `VPCLatticeV2Event` |
+| [VPC Lattice V1](#vpc-lattice-v1) | `VPCLatticeEvent` |
???+ info
The examples provided below are far from exhaustive - the data classes themselves are designed to provide a form of
@@ -1180,7 +1181,25 @@ AWS Secrets Manager rotation uses an AWS Lambda function to update the secret. [
do_something_with(record.body)
```
-### VPC Lattice
+### VPC Lattice V2
+
+You can register your Lambda functions as targets within an Amazon VPC Lattice service network. By doing this, your Lambda function becomes a service within the network, and clients that have access to the VPC Lattice service network can call your service using [Payload V2](https://docs.aws.amazon.com/lambda/latest/dg/services-vpc-lattice.html#vpc-lattice-receiving-events){target="_blank"}.
+
+[Click here](https://docs.aws.amazon.com/lambda/latest/dg/services-vpc-lattice.html){target="_blank"} for more information about using AWS Lambda with Amazon VPC Lattice.
+
+=== "app.py"
+
+ ```python hl_lines="2 8"
+ --8<-- "examples/event_sources/src/vpc_lattice_v2.py"
+ ```
+
+=== "Lattice Example Event"
+
+ ```json
+ --8<-- "examples/event_sources/src/vpc_lattice_v2_payload.json"
+ ```
+
+### VPC Lattice V1
You can register your Lambda functions as targets within an Amazon VPC Lattice service network. By doing this, your Lambda function becomes a service within the network, and clients that have access to the VPC Lattice service network can call your service.
diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md
index 846460e43d2..8f0a7bbd06f 100644
--- a/docs/utilities/parser.md
+++ b/docs/utilities/parser.md
@@ -118,6 +118,12 @@ handler(event=payload, context=LambdaContext())
handler(event=json.dumps(payload), context=LambdaContext()) # also works if event is a JSON string
```
+Alternatively, you can automatically extract the model from the `event` without the need to include the model parameter in the `event_parser` function.
+
+```python hl_lines="23 24"
+ --8<-- "examples/parser/src/using_the_model_from_event.py"
+```
+
#### parse function
Use this standalone function when you want more control over the data validation process, for example returning a 400 error for malformed payloads.
@@ -192,6 +198,7 @@ Parser comes with the following built-in models:
| **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service |
| **SqsModel** | Lambda Event Source payload for Amazon SQS |
| **VpcLatticeModel** | Lambda Event Source payload for Amazon VPC Lattice |
+| **VpcLatticeV2Model** | Lambda Event Source payload for Amazon VPC Lattice v2 payload |
#### Extending built-in models
diff --git a/examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.json b/examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.json
new file mode 100644
index 00000000000..38c94683432
--- /dev/null
+++ b/examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.json
@@ -0,0 +1,29 @@
+{
+ "version": "2.0",
+ "path": "/todos",
+ "method": "GET",
+ "headers": {
+ "user_agent": "curl/7.64.1",
+ "x-forwarded-for": "10.213.229.10",
+ "host": "test-lambda-service-3908sdf9u3u.dkfjd93.vpc-lattice-svcs.us-east-2.on.aws",
+ "accept": "*/*"
+ },
+ "queryStringParameters": {
+ "order-id": "1"
+ },
+ "body": "{\"message\": \"Hello from Lambda!\"}",
+ "requestContext": {
+ "serviceNetworkArn": "arn:aws:vpc-lattice:us-east-2:123456789012:servicenetwork/sn-0bf3f2882e9cc805a",
+ "serviceArn": "arn:aws:vpc-lattice:us-east-2:123456789012:service/svc-0a40eebed65f8d69c",
+ "targetGroupArn": "arn:aws:vpc-lattice:us-east-2:123456789012:targetgroup/tg-6d0ecf831eec9f09",
+ "identity": {
+ "sourceVpcArn": "arn:aws:ec2:region:123456789012:vpc/vpc-0b8276c84697e7339",
+ "type" : "AWS_IAM",
+ "principal": "arn:aws:sts::123456789012:assumed-role/example-role/057d00f8b51257ba3c853a0f248943cf",
+ "sessionName": "057d00f8b51257ba3c853a0f248943cf",
+ "x509SanDns": "example.com"
+ },
+ "region": "us-east-2",
+ "timeEpoch": "1696331543569073"
+ }
+}
diff --git a/examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.py b/examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.py
new file mode 100644
index 00000000000..4cf61caecaf
--- /dev/null
+++ b/examples/event_handler_rest/src/getting_started_vpclatticev2_resolver.py
@@ -0,0 +1,28 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import VPCLatticeV2Resolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = VPCLatticeV2Resolver()
+
+
+@app.get("/todos")
+@tracer.capture_method
+def get_todos():
+ todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos")
+ todos.raise_for_status()
+
+ # for brevity, we'll limit to the first 10 only
+ return {"todos": todos.json()[:10]}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ return app.resolve(event, context)
diff --git a/examples/event_sources/src/vpc_lattice_v2.py b/examples/event_sources/src/vpc_lattice_v2.py
new file mode 100644
index 00000000000..0d11328bd76
--- /dev/null
+++ b/examples/event_sources/src/vpc_lattice_v2.py
@@ -0,0 +1,20 @@
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.utilities.data_classes import VPCLatticeEventV2, event_source
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = Logger()
+
+
+@event_source(data_class=VPCLatticeEventV2)
+def lambda_handler(event: VPCLatticeEventV2, context: LambdaContext):
+ logger.info(event.body)
+
+ response = {
+ "isBase64Encoded": False,
+ "statusCode": 200,
+ "statusDescription": "200 OK",
+ "headers": {"Content-Type": "application/text"},
+ "body": "VPC Lattice V2 Event ✨🎉✨",
+ }
+
+ return response
diff --git a/examples/event_sources/src/vpc_lattice_v2_payload.json b/examples/event_sources/src/vpc_lattice_v2_payload.json
new file mode 100644
index 00000000000..38c94683432
--- /dev/null
+++ b/examples/event_sources/src/vpc_lattice_v2_payload.json
@@ -0,0 +1,29 @@
+{
+ "version": "2.0",
+ "path": "/todos",
+ "method": "GET",
+ "headers": {
+ "user_agent": "curl/7.64.1",
+ "x-forwarded-for": "10.213.229.10",
+ "host": "test-lambda-service-3908sdf9u3u.dkfjd93.vpc-lattice-svcs.us-east-2.on.aws",
+ "accept": "*/*"
+ },
+ "queryStringParameters": {
+ "order-id": "1"
+ },
+ "body": "{\"message\": \"Hello from Lambda!\"}",
+ "requestContext": {
+ "serviceNetworkArn": "arn:aws:vpc-lattice:us-east-2:123456789012:servicenetwork/sn-0bf3f2882e9cc805a",
+ "serviceArn": "arn:aws:vpc-lattice:us-east-2:123456789012:service/svc-0a40eebed65f8d69c",
+ "targetGroupArn": "arn:aws:vpc-lattice:us-east-2:123456789012:targetgroup/tg-6d0ecf831eec9f09",
+ "identity": {
+ "sourceVpcArn": "arn:aws:ec2:region:123456789012:vpc/vpc-0b8276c84697e7339",
+ "type" : "AWS_IAM",
+ "principal": "arn:aws:sts::123456789012:assumed-role/example-role/057d00f8b51257ba3c853a0f248943cf",
+ "sessionName": "057d00f8b51257ba3c853a0f248943cf",
+ "x509SanDns": "example.com"
+ },
+ "region": "us-east-2",
+ "timeEpoch": "1696331543569073"
+ }
+}
diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml
index d0f0d00d0d0..16374cc6e4f 100644
--- a/examples/logger/sam/template.yaml
+++ b/examples/logger/sam/template.yaml
@@ -14,7 +14,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45
Resources:
LoggerLambdaHandlerExample:
diff --git a/examples/logger/src/append_keys_extra_output.json b/examples/logger/src/append_keys_extra_output.json
index b25abb226a1..3d1c5ed5eaf 100644
--- a/examples/logger/src/append_keys_extra_output.json
+++ b/examples/logger/src/append_keys_extra_output.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:9",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"request_id": "1123"
}
diff --git a/examples/logger/src/append_keys_kwargs_output.json b/examples/logger/src/append_keys_kwargs_output.json
index cd888e5e2af..3923d3e66c4 100644
--- a/examples/logger/src/append_keys_kwargs_output.json
+++ b/examples/logger/src/append_keys_kwargs_output.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:8",
"message": "Collecting payment",
- "timestamp": "2022-11-26 11:47:12,494+0200",
+ "timestamp": "2022-11-26 11:47:12,494+0000",
"service": "payment",
"request_id": "1123"
}
\ No newline at end of file
diff --git a/examples/logger/src/append_keys_output.json b/examples/logger/src/append_keys_output.json
index 1e6d38bf785..b4eb223484c 100644
--- a/examples/logger/src/append_keys_output.json
+++ b/examples/logger/src/append_keys_output.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:11",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"order_id": "order_id_value"
}
diff --git a/examples/logger/src/bring_your_own_formatter_output.json b/examples/logger/src/bring_your_own_formatter_output.json
index 19869b7b885..d23cbcb5721 100644
--- a/examples/logger/src/bring_your_own_formatter_output.json
+++ b/examples/logger/src/bring_your_own_formatter_output.json
@@ -1,7 +1,7 @@
{
"level": "INFO",
"location": ":16",
- "timestamp": "2021-12-30 13:41:53,413+0100",
+ "timestamp": "2021-12-30 13:41:53,413+0000",
"service": "payment",
"event": "hello"
}
diff --git a/examples/logger/src/clear_state_event_one.json b/examples/logger/src/clear_state_event_one.json
index 394624ed8f6..99d9b3c7484 100644
--- a/examples/logger/src/clear_state_event_one.json
+++ b/examples/logger/src/clear_state_event_one.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:10",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"special_key": "debug_key",
"cold_start": true,
diff --git a/examples/logger/src/clear_state_event_two.json b/examples/logger/src/clear_state_event_two.json
index 93f59b50f82..66c4893531d 100644
--- a/examples/logger/src/clear_state_event_two.json
+++ b/examples/logger/src/clear_state_event_two.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:10",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": false,
"function_name": "test",
diff --git a/examples/logger/src/date_formatting_output.json b/examples/logger/src/date_formatting_output.json
index 2d5ba1dff88..6adebb0de2a 100644
--- a/examples/logger/src/date_formatting_output.json
+++ b/examples/logger/src/date_formatting_output.json
@@ -3,7 +3,7 @@
"level": "INFO",
"location": ":6",
"message": "Collecting payment",
- "timestamp": "2022-10-28T14:35:03.210+02:00",
+ "timestamp": "2022-10-28T14:35:03.210+00:00",
"service": "payment"
},
{
diff --git a/examples/logger/src/inject_lambda_context_output.json b/examples/logger/src/inject_lambda_context_output.json
index a9f2937dc09..a8fbe915667 100644
--- a/examples/logger/src/inject_lambda_context_output.json
+++ b/examples/logger/src/inject_lambda_context_output.json
@@ -3,7 +3,7 @@
"level": "INFO",
"location": "collect.handler:9",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": true,
"function_name": "test",
@@ -18,7 +18,7 @@
"operation": "collect_payment",
"charge_id": "ch_AZFlk2345C0"
},
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": true,
"function_name": "test",
diff --git a/examples/logger/src/logger_reuse_output.json b/examples/logger/src/logger_reuse_output.json
index 9dbce381ea3..e771684b922 100644
--- a/examples/logger/src/logger_reuse_output.json
+++ b/examples/logger/src/logger_reuse_output.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:12",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": true,
"function_name": "test",
diff --git a/examples/logger/src/logging_exceptions_output.json b/examples/logger/src/logging_exceptions_output.json
index 8f3011e3a87..15831f9b776 100644
--- a/examples/logger/src/logging_exceptions_output.json
+++ b/examples/logger/src/logging_exceptions_output.json
@@ -2,7 +2,7 @@
"level": "ERROR",
"location": "collect.handler:15",
"message": "Received a HTTP 5xx error",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"exception_name": "RuntimeError",
"exception": "Traceback (most recent call last):\n File \"\", line 2, in RuntimeError: Unable to fullfil request"
diff --git a/examples/logger/src/logging_stacktrace.py b/examples/logger/src/logging_stacktrace.py
new file mode 100644
index 00000000000..128836f5138
--- /dev/null
+++ b/examples/logger/src/logging_stacktrace.py
@@ -0,0 +1,18 @@
+import requests
+
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+ENDPOINT = "http://httpbin.org/status/500"
+logger = Logger(serialize_stacktrace=True)
+
+
+def lambda_handler(event: dict, context: LambdaContext) -> str:
+ try:
+ ret = requests.get(ENDPOINT)
+ ret.raise_for_status()
+ except requests.HTTPError as e:
+ logger.exception(e)
+ raise RuntimeError("Unable to fullfil request") from e
+
+ return "hello world"
diff --git a/examples/logger/src/logging_stacktrace_output.json b/examples/logger/src/logging_stacktrace_output.json
new file mode 100644
index 00000000000..d972f3ff6a9
--- /dev/null
+++ b/examples/logger/src/logging_stacktrace_output.json
@@ -0,0 +1,28 @@
+{
+ "level":"ERROR",
+ "location":"lambda_handler:16",
+ "message":"500 Server Error: INTERNAL SERVER ERROR for url: http://httpbin.org/status/500",
+ "timestamp":"2023-10-09 17:47:50,191+0000",
+ "service":"service_undefined",
+ "exception":"Traceback (most recent call last):\n File \"/var/task/app.py\", line 14, in lambda_handler\n ret.raise_for_status()\n File \"/var/task/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 500 Server Error: INTERNAL SERVER ERROR for url: http://httpbin.org/status/500",
+ "exception_name":"HTTPError",
+ "stack_trace":{
+ "type":"HTTPError",
+ "value":"500 Server Error: INTERNAL SERVER ERROR for url: http://httpbin.org/status/500",
+ "module":"requests.exceptions",
+ "frames":[
+ {
+ "file":"/var/task/app.py",
+ "line":14,
+ "function":"lambda_handler",
+ "statement":"ret.raise_for_status()"
+ },
+ {
+ "file":"/var/task/requests/models.py",
+ "line":1021,
+ "function":"raise_for_status",
+ "statement":"raise HTTPError(http_error_msg, response=self)"
+ }
+ ]
+ }
+ }
diff --git a/examples/logger/src/logging_uncaught_exceptions_output.json b/examples/logger/src/logging_uncaught_exceptions_output.json
index c8ff16e55b5..7952ba9e873 100644
--- a/examples/logger/src/logging_uncaught_exceptions_output.json
+++ b/examples/logger/src/logging_uncaught_exceptions_output.json
@@ -2,7 +2,7 @@
"level": "ERROR",
"location": "log_uncaught_exception_hook:756",
"message": "500 Server Error: INTERNAL SERVER ERROR for url: http://httpbin.org/status/500",
- "timestamp": "2022-11-16 13:51:29,198+0100",
+ "timestamp": "2022-11-16 13:51:29,198+0000",
"service": "payment",
"exception": "Traceback (most recent call last):\n File \"\", line 52, in \n handler({}, {})\n File \"\", line 17, in handler\n ret.raise_for_status()\n File \"/lib/python3.9/site-packages/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 500 Server Error: INTERNAL SERVER ERROR for url: http://httpbin.org/status/500",
"exception_name": "HTTPError"
diff --git a/examples/logger/src/overriding_log_records_output.json b/examples/logger/src/overriding_log_records_output.json
index 676f36ca298..93910304c6e 100644
--- a/examples/logger/src/overriding_log_records_output.json
+++ b/examples/logger/src/overriding_log_records_output.json
@@ -3,13 +3,13 @@
"level": "INFO",
"location": "[] overriding_log_records",
"message": "Collecting payment",
- "timestamp": "2022-10-28 14:40:43,801+0200",
+ "timestamp": "2022-10-28 14:40:43,801+0000",
"service": "payment"
},
{
"level": "INFO",
"message": "Calculating points",
- "timestamp": "2022-10-28 14:40:43,801+0200",
+ "timestamp": "2022-10-28 14:40:43,801+0000",
"service": "loyalty"
}
]
\ No newline at end of file
diff --git a/examples/logger/src/remove_keys_output.json b/examples/logger/src/remove_keys_output.json
index 4ec8740784e..ee89ed8e2a1 100644
--- a/examples/logger/src/remove_keys_output.json
+++ b/examples/logger/src/remove_keys_output.json
@@ -3,7 +3,7 @@
"level": "INFO",
"location": "collect.handler:9",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"sample_key": "value"
},
@@ -11,7 +11,7 @@
"level": "INFO",
"location": "collect.handler:12",
"message": "Collecting payment without sample key",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment"
}
]
diff --git a/examples/logger/src/reordering_log_keys_output.json b/examples/logger/src/reordering_log_keys_output.json
index c89f7cb48bd..c0052808002 100644
--- a/examples/logger/src/reordering_log_keys_output.json
+++ b/examples/logger/src/reordering_log_keys_output.json
@@ -3,14 +3,14 @@
"message": "hello world",
"level": "INFO",
"location": ":11",
- "timestamp": "2022-06-24 11:25:40,143+0200",
+ "timestamp": "2022-06-24 11:25:40,143+0000",
"service": "payment"
},
{
"request_id": "123",
"level": "INFO",
"location": ":12",
- "timestamp": "2022-06-24 11:25:40,144+0200",
+ "timestamp": "2022-06-24 11:25:40,144+0000",
"service": "order",
"message": "hello universe"
}
diff --git a/examples/logger/src/sampling_debug_logs_output.json b/examples/logger/src/sampling_debug_logs_output.json
index dbeb28edb8a..a889b538bc7 100644
--- a/examples/logger/src/sampling_debug_logs_output.json
+++ b/examples/logger/src/sampling_debug_logs_output.json
@@ -3,7 +3,7 @@
"level": "DEBUG",
"location": "collect.handler:7",
"message": "Verifying whether order_id is present",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": true,
"function_name": "test",
@@ -16,7 +16,7 @@
"level": "INFO",
"location": "collect.handler:7",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": true,
"function_name": "test",
diff --git a/examples/logger/src/set_correlation_id_jmespath_output.json b/examples/logger/src/set_correlation_id_jmespath_output.json
index 0744a0fc8d1..3ba6a90b684 100644
--- a/examples/logger/src/set_correlation_id_jmespath_output.json
+++ b/examples/logger/src/set_correlation_id_jmespath_output.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:11",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": true,
"function_name": "test",
diff --git a/examples/logger/src/set_correlation_id_method_output.json b/examples/logger/src/set_correlation_id_method_output.json
index f78d26740ae..c1f336c5390 100644
--- a/examples/logger/src/set_correlation_id_method_output.json
+++ b/examples/logger/src/set_correlation_id_method_output.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:13",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"correlation_id": "correlation_id_value"
}
diff --git a/examples/logger/src/set_correlation_id_output.json b/examples/logger/src/set_correlation_id_output.json
index 3cc205e32e2..95b497a9500 100644
--- a/examples/logger/src/set_correlation_id_output.json
+++ b/examples/logger/src/set_correlation_id_output.json
@@ -2,7 +2,7 @@
"level": "INFO",
"location": "collect.handler:10",
"message": "Collecting payment",
- "timestamp": "2021-05-03 11:47:12,494+0200",
+ "timestamp": "2021-05-03 11:47:12,494+0000",
"service": "payment",
"cold_start": true,
"function_name": "test",
diff --git a/examples/logger/src/setting_utc_timestamp.py b/examples/logger/src/setting_utc_timestamp.py
index a454e216d75..7f893823677 100644
--- a/examples/logger/src/setting_utc_timestamp.py
+++ b/examples/logger/src/setting_utc_timestamp.py
@@ -1,7 +1,13 @@
+import os
+import time
+
from aws_lambda_powertools import Logger
-logger = Logger(service="payment")
-logger.info("Local time")
+logger_in_utc = Logger(service="payment")
+logger_in_utc.info("Logging with default AWS Lambda timezone: UTC time")
+
+os.environ["TZ"] = "US/Eastern"
+time.tzset() # (1)!
-logger_in_utc = Logger(service="order", utc=True)
-logger_in_utc.info("GMT time zone")
+logger = Logger(service="order")
+logger.info("Logging with US Eastern timezone")
diff --git a/examples/logger/src/setting_utc_timestamp_output.json b/examples/logger/src/setting_utc_timestamp_output.json
index 80083fbf61b..4e35bf48450 100644
--- a/examples/logger/src/setting_utc_timestamp_output.json
+++ b/examples/logger/src/setting_utc_timestamp_output.json
@@ -1,16 +1,16 @@
[
{
- "level": "INFO",
- "location": ":4",
- "message": "Local time",
- "timestamp": "2022-06-24 11:39:49,421+0200",
- "service": "payment"
+ "level":"INFO",
+ "location":":7",
+ "message":"Logging with default AWS Lambda timezone: UTC time",
+ "timestamp":"2023-10-09 21:33:55,733+0000",
+ "service":"payment"
},
{
- "level": "INFO",
- "location": ":7",
- "message": "GMT time zone",
- "timestamp": "2022-06-24 09:39:49,421+0100",
- "service": "order"
+ "level":"INFO",
+ "location":":13",
+ "message":"Logging with US Eastern timezone",
+ "timestamp":"2023-10-09 17:33:55,734-0400",
+ "service":"order"
}
]
diff --git a/examples/logger/src/unserializable_values_output.json b/examples/logger/src/unserializable_values_output.json
index ed7770cab03..744fd82a219 100644
--- a/examples/logger/src/unserializable_values_output.json
+++ b/examples/logger/src/unserializable_values_output.json
@@ -5,6 +5,6 @@
"ingestion_time": "2022-06-24T10:12:09.526365",
"serialize_me": ""
},
- "timestamp": "2022-06-24 12:12:09,526+0200",
+ "timestamp": "2022-06-24 12:12:09,526+0000",
"service": "payment"
}
diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml
index 9ba4ab2c666..d32ec5cd240 100644
--- a/examples/metrics/sam/template.yaml
+++ b/examples/metrics/sam/template.yaml
@@ -15,7 +15,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45
Resources:
CaptureLambdaHandlerExample:
diff --git a/examples/parser/src/using_the_model_from_event.py b/examples/parser/src/using_the_model_from_event.py
new file mode 100644
index 00000000000..41e3116c61a
--- /dev/null
+++ b/examples/parser/src/using_the_model_from_event.py
@@ -0,0 +1,27 @@
+import json
+
+from pydantic import BaseModel, validator
+
+from aws_lambda_powertools.utilities.parser import event_parser
+from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventV2Model
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class CancelOrder(BaseModel):
+ order_id: int
+ reason: str
+
+
+class CancelOrderModel(APIGatewayProxyEventV2Model):
+ body: CancelOrder # type: ignore[assignment]
+
+ @validator("body", pre=True)
+ def transform_body_to_dict(cls, value: str):
+ return json.loads(value)
+
+
+@event_parser
+def handler(event: CancelOrderModel, context: LambdaContext):
+ cancel_order: CancelOrder = event.body
+
+ assert cancel_order.order_id is not None
diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml
index fff32e10dec..4604a93fe2e 100644
--- a/examples/tracer/sam/template.yaml
+++ b/examples/tracer/sam/template.yaml
@@ -13,7 +13,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:44
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:45
Resources:
CaptureLambdaHandlerExample:
diff --git a/layer/poetry.lock b/layer/poetry.lock
index 5b2de457c82..7168cfd7eed 100644
--- a/layer/poetry.lock
+++ b/layer/poetry.lock
@@ -1,10 +1,9 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "attrs"
version = "23.1.0"
description = "Classes Without Boilerplate"
-category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -23,7 +22,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte
name = "aws-cdk-asset-awscli-v1"
version = "2.2.200"
description = "A library that contains the AWS CLI for use in Lambda Layers"
-category = "main"
optional = false
python-versions = "~=3.7"
files = [
@@ -40,7 +38,6 @@ typeguard = ">=2.13.3,<2.14.0"
name = "aws-cdk-asset-kubectl-v20"
version = "2.1.2"
description = "A library that contains kubectl for use in Lambda Layers"
-category = "main"
optional = false
python-versions = "~=3.7"
files = [
@@ -57,7 +54,6 @@ typeguard = ">=2.13.3,<2.14.0"
name = "aws-cdk-asset-node-proxy-agent-v5"
version = "2.0.166"
description = "@aws-cdk/asset-node-proxy-agent-v5"
-category = "main"
optional = false
python-versions = "~=3.7"
files = [
@@ -74,7 +70,6 @@ typeguard = ">=2.13.3,<2.14.0"
name = "aws-cdk-lib"
version = "2.88.0"
description = "Version 2 of the AWS Cloud Development Kit library"
-category = "main"
optional = false
python-versions = "~=3.7"
files = [
@@ -95,7 +90,6 @@ typeguard = ">=2.13.3,<2.14.0"
name = "boto3"
version = "1.28.8"
description = "The AWS SDK for Python"
-category = "dev"
optional = false
python-versions = ">= 3.7"
files = [
@@ -115,7 +109,6 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
name = "botocore"
version = "1.31.8"
description = "Low-level, data-driven core of boto 3."
-category = "dev"
optional = false
python-versions = ">= 3.7"
files = [
@@ -135,7 +128,6 @@ crt = ["awscrt (==0.16.26)"]
name = "cattrs"
version = "23.1.2"
description = "Composable complex class support for attrs and dataclasses."
-category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -161,7 +153,6 @@ ujson = ["ujson (>=5.4.0,<6.0.0)"]
name = "cdk-aws-lambda-powertools-layer"
version = "3.6.0"
description = "Powertools for AWS Lambda layer for python and typescript"
-category = "main"
optional = false
python-versions = "~=3.7"
files = [
@@ -180,7 +171,6 @@ typeguard = ">=2.13.3,<2.14.0"
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
-category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
@@ -192,7 +182,6 @@ files = [
name = "constructs"
version = "10.2.69"
description = "A programming model for software-defined state"
-category = "main"
optional = false
python-versions = "~=3.7"
files = [
@@ -209,7 +198,6 @@ typeguard = ">=2.13.3,<2.14.0"
name = "exceptiongroup"
version = "1.1.2"
description = "Backport of PEP 654 (exception groups)"
-category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -224,7 +212,6 @@ test = ["pytest (>=6)"]
name = "importlib-resources"
version = "6.0.0"
description = "Read resources from Python packages"
-category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -243,7 +230,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)",
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
-category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -255,7 +241,6 @@ files = [
name = "jmespath"
version = "1.0.1"
description = "JSON Matching Expressions"
-category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -267,7 +252,6 @@ files = [
name = "jsii"
version = "1.85.0"
description = "Python client for jsii runtime"
-category = "main"
optional = false
python-versions = "~=3.7"
files = [
@@ -288,7 +272,6 @@ typing-extensions = ">=3.7,<5.0"
name = "packaging"
version = "23.1"
description = "Core utilities for Python packages"
-category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -300,7 +283,6 @@ files = [
name = "pluggy"
version = "1.2.0"
description = "plugin and hook calling mechanisms for python"
-category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -316,7 +298,6 @@ testing = ["pytest", "pytest-benchmark"]
name = "publication"
version = "0.0.3"
description = "Publication helps you maintain public-api-friendly modules by preventing unintentional access to private implementation details via introspection."
-category = "main"
optional = false
python-versions = "*"
files = [
@@ -328,7 +309,6 @@ files = [
name = "pytest"
version = "7.4.0"
description = "pytest: simple powerful testing with Python"
-category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -351,7 +331,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
-category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
@@ -366,7 +345,6 @@ six = ">=1.5"
name = "s3transfer"
version = "0.6.1"
description = "An Amazon S3 Transfer Manager"
-category = "dev"
optional = false
python-versions = ">= 3.7"
files = [
@@ -384,7 +362,6 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
-category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
@@ -396,7 +373,6 @@ files = [
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
-category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -408,7 +384,6 @@ files = [
name = "typeguard"
version = "2.13.3"
description = "Run-time type checker for Python"
-category = "main"
optional = false
python-versions = ">=3.5.3"
files = [
@@ -424,7 +399,6 @@ test = ["mypy", "pytest", "typing-extensions"]
name = "typing-extensions"
version = "4.7.1"
description = "Backported and Experimental Type Hints for Python 3.7+"
-category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -434,18 +408,17 @@ files = [
[[package]]
name = "urllib3"
-version = "1.26.16"
+version = "1.26.17"
description = "HTTP library with thread-safe connection pooling, file post, and more."
-category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"},
- {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"},
+ {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"},
+ {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
@@ -453,7 +426,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
name = "zipp"
version = "3.16.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "main"
optional = false
python-versions = ">=3.8"
files = [
diff --git a/layer/scripts/layer-balancer/go.mod b/layer/scripts/layer-balancer/go.mod
index 65eba4a5add..3e7df7a8d0c 100644
--- a/layer/scripts/layer-balancer/go.mod
+++ b/layer/scripts/layer-balancer/go.mod
@@ -3,24 +3,24 @@ module layerbalancer
go 1.18
require (
- github.com/aws/aws-sdk-go-v2 v1.21.0
- github.com/aws/aws-sdk-go-v2/config v1.18.41
- github.com/aws/aws-sdk-go-v2/service/lambda v1.39.5
+ github.com/aws/aws-sdk-go-v2 v1.21.2
+ github.com/aws/aws-sdk-go-v2/config v1.18.45
+ github.com/aws/aws-sdk-go-v2/service/lambda v1.40.0
golang.org/x/exp v0.0.0-20230321023759-10a507213a29
- golang.org/x/sync v0.3.0
+ golang.org/x/sync v0.4.0
)
require (
- github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13 // indirect
- github.com/aws/aws-sdk-go-v2/credentials v1.13.39 // indirect
- github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11 // indirect
- github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41 // indirect
- github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 // indirect
- github.com/aws/aws-sdk-go-v2/internal/ini v1.3.42 // indirect
- github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35 // indirect
- github.com/aws/aws-sdk-go-v2/service/sso v1.14.0 // indirect
- github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.0 // indirect
- github.com/aws/aws-sdk-go-v2/service/sts v1.22.0 // indirect
- github.com/aws/smithy-go v1.14.2 // indirect
+ github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.14 // indirect
+ github.com/aws/aws-sdk-go-v2/credentials v1.13.43 // indirect
+ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.13 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.43 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.37 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/ini v1.3.45 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.37 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sso v1.15.2 // indirect
+ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.3 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sts v1.23.2 // indirect
+ github.com/aws/smithy-go v1.15.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
)
diff --git a/layer/scripts/layer-balancer/go.sum b/layer/scripts/layer-balancer/go.sum
index 799db3dbd73..dd0a50e50c5 100644
--- a/layer/scripts/layer-balancer/go.sum
+++ b/layer/scripts/layer-balancer/go.sum
@@ -1,31 +1,31 @@
-github.com/aws/aws-sdk-go-v2 v1.21.0 h1:gMT0IW+03wtYJhRqTVYn0wLzwdnK9sRMcxmtfGzRdJc=
-github.com/aws/aws-sdk-go-v2 v1.21.0/go.mod h1:/RfNgGmRxI+iFOB1OeJUyxiU+9s88k3pfHvDagGEp0M=
-github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13 h1:OPLEkmhXf6xFPiz0bLeDArZIDx1NNS4oJyG4nv3Gct0=
-github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13/go.mod h1:gpAbvyDGQFozTEmlTFO8XcQKHzubdq0LzRyJpG6MiXM=
-github.com/aws/aws-sdk-go-v2/config v1.18.41 h1:Go7z97YDsBJVNAaL7pDPKB6LeHEsAkHmFe+CeK30fUQ=
-github.com/aws/aws-sdk-go-v2/config v1.18.41/go.mod h1:+yR45+A0LIMKT8bWOKo90Hy9rSrovEmEKoPKLmmVec8=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.39 h1:UnwBXDIHKDaejSXaRzKR57IdGCizk+z1DEhnsFpus7Q=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.39/go.mod h1:OJ9P239A90TnglJEF3qofKiNeEM6PCV/m+aNGV5WC24=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11 h1:uDZJF1hu0EVT/4bogChk8DyjSF6fof6uL/0Y26Ma7Fg=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11/go.mod h1:TEPP4tENqBGO99KwVpV9MlOX4NSrSLP8u3KRy2CDwA8=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41 h1:22dGT7PneFMx4+b3pz7lMTRyN8ZKH7M2cW4GP9yUS2g=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41/go.mod h1:CrObHAuPneJBlfEJ5T3szXOUkLEThaGfvnhTf33buas=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 h1:SijA0mgjV8E+8G45ltVHs0fvKpTj8xmZJ3VwhGKtUSI=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35/go.mod h1:SJC1nEVVva1g3pHAIdCp7QsRIkMmLAgoDquQ9Rr8kYw=
-github.com/aws/aws-sdk-go-v2/internal/ini v1.3.42 h1:GPUcE/Yq7Ur8YSUk6lVkoIMWnJNO0HT18GUzCWCgCI0=
-github.com/aws/aws-sdk-go-v2/internal/ini v1.3.42/go.mod h1:rzfdUlfA+jdgLDmPKjd3Chq9V7LVLYo1Nz++Wb91aRo=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35 h1:CdzPW9kKitgIiLV1+MHobfR5Xg25iYnyzWZhyQuSlDI=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35/go.mod h1:QGF2Rs33W5MaN9gYdEQOBBFPLwTZkEhRwI33f7KIG0o=
-github.com/aws/aws-sdk-go-v2/service/lambda v1.39.5 h1:uMvxJFS92hNW6BRX0Ou+5zb9DskgrJQHZ+5yT8FXK5Y=
-github.com/aws/aws-sdk-go-v2/service/lambda v1.39.5/go.mod h1:ByLHcf0zbHpyLTOy1iPVRPJWmAUPCiJv5k81dt52ID8=
-github.com/aws/aws-sdk-go-v2/service/sso v1.14.0 h1:AR/hlTsCyk1CwlyKnPFvIMvnONydRjDDRT9OGb0i+/g=
-github.com/aws/aws-sdk-go-v2/service/sso v1.14.0/go.mod h1:fIAwKQKBFu90pBxx07BFOMJLpRUGu8VOzLJakeY+0K4=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.0 h1:UniOmlPJelksyP5dGjfRoFTmLDy4/o0HH1lK2Op7zC8=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.0/go.mod h1:yygr8ACQRY2PrEcy3xsUI357stq2AxnFM6DIsR9lij4=
-github.com/aws/aws-sdk-go-v2/service/sts v1.22.0 h1:s4bioTgjSFRwOoyEFzAVCmFmoowBgjTR8gkrF/sQ4wk=
-github.com/aws/aws-sdk-go-v2/service/sts v1.22.0/go.mod h1:VC7JDqsqiwXukYEDjoHh9U0fOJtNWh04FPQz4ct4GGU=
-github.com/aws/smithy-go v1.14.2 h1:MJU9hqBGbvWZdApzpvoF2WAIJDbtjK2NDJSiJP7HblQ=
-github.com/aws/smithy-go v1.14.2/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
+github.com/aws/aws-sdk-go-v2 v1.21.2 h1:+LXZ0sgo8quN9UOKXXzAWRT3FWd4NxeXWOZom9pE7GA=
+github.com/aws/aws-sdk-go-v2 v1.21.2/go.mod h1:ErQhvNuEMhJjweavOYhxVkn2RUx7kQXVATHrjKtxIpM=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.14 h1:Sc82v7tDQ/vdU1WtuSyzZ1I7y/68j//HJ6uozND1IDs=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.14/go.mod h1:9NCTOURS8OpxvoAVHq79LK81/zC78hfRWFn+aL0SPcY=
+github.com/aws/aws-sdk-go-v2/config v1.18.45 h1:Aka9bI7n8ysuwPeFdm77nfbyHCAKQ3z9ghB3S/38zes=
+github.com/aws/aws-sdk-go-v2/config v1.18.45/go.mod h1:ZwDUgFnQgsazQTnWfeLWk5GjeqTQTL8lMkoE1UXzxdE=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.43 h1:LU8vo40zBlo3R7bAvBVy/ku4nxGEyZe9N8MqAeFTzF8=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.43/go.mod h1:zWJBz1Yf1ZtX5NGax9ZdNjhhI4rgjfgsyk6vTY1yfVg=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.13 h1:PIktER+hwIG286DqXyvVENjgLTAwGgoeriLDD5C+YlQ=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.13/go.mod h1:f/Ib/qYjhV2/qdsf79H3QP/eRE4AkVyEf6sk7XfZ1tg=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.43 h1:nFBQlGtkbPzp/NjZLuFxRqmT91rLJkgvsEQs68h962Y=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.43/go.mod h1:auo+PiyLl0n1l8A0e8RIeR8tOzYPfZZH/JNlrJ8igTQ=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.37 h1:JRVhO25+r3ar2mKGP7E0LDl8K9/G36gjlqca5iQbaqc=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.37/go.mod h1:Qe+2KtKml+FEsQF/DHmDV+xjtche/hwoF75EG4UlHW8=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.3.45 h1:hze8YsjSh8Wl1rYa1CJpRmXP21BvOBuc76YhW0HsuQ4=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.3.45/go.mod h1:lD5M20o09/LCuQ2mE62Mb/iSdSlCNuj6H5ci7tW7OsE=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.37 h1:WWZA/I2K4ptBS1kg0kV1JbBtG/umed0vwHRrmcr9z7k=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.37/go.mod h1:vBmDnwWXWxNPFRMmG2m/3MKOe+xEcMDo1tanpaWCcck=
+github.com/aws/aws-sdk-go-v2/service/lambda v1.40.0 h1:M5NR3l0p/+8H0Ers+e2iKIwi2YmifUMgdTtEjZnwTeU=
+github.com/aws/aws-sdk-go-v2/service/lambda v1.40.0/go.mod h1:kFs07FNyTowZkz+dGBR33xJbzGs2mkC5Kfm6/lyR5CA=
+github.com/aws/aws-sdk-go-v2/service/sso v1.15.2 h1:JuPGc7IkOP4AaqcZSIcyqLpFSqBWK32rM9+a1g6u73k=
+github.com/aws/aws-sdk-go-v2/service/sso v1.15.2/go.mod h1:gsL4keucRCgW+xA85ALBpRFfdSLH4kHOVSnLMSuBECo=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.3 h1:HFiiRkf1SdaAmV3/BHOFZ9DjFynPHj8G/UIO1lQS+fk=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.3/go.mod h1:a7bHA82fyUXOm+ZSWKU6PIoBxrjSprdLoM8xPYvzYVg=
+github.com/aws/aws-sdk-go-v2/service/sts v1.23.2 h1:0BkLfgeDjfZnZ+MhB3ONb01u9pwFYTCZVhlsSSBvlbU=
+github.com/aws/aws-sdk-go-v2/service/sts v1.23.2/go.mod h1:Eows6e1uQEsc4ZaHANmsPRzAKcVDrcmjjWiih2+HUUQ=
+github.com/aws/smithy-go v1.15.0 h1:PS/durmlzvAFpQHDs4wi4sNNP9ExsqZh6IlfdHXgKK8=
+github.com/aws/smithy-go v1.15.0/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
@@ -39,8 +39,8 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
golang.org/x/exp v0.0.0-20230321023759-10a507213a29 h1:ooxPy7fPvB4kwsA2h+iBNHkAbp/4JxTSwCmvdjEYmug=
golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
-golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
-golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
+golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/layer/scripts/layer-balancer/main.go b/layer/scripts/layer-balancer/main.go
index a4b28fd0545..cc017b79272 100644
--- a/layer/scripts/layer-balancer/main.go
+++ b/layer/scripts/layer-balancer/main.go
@@ -64,6 +64,7 @@ var regions = []string{
"eu-west-1",
"eu-west-2",
"eu-west-3",
+ "il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
@@ -73,14 +74,8 @@ var regions = []string{
"us-west-2",
}
-var singleArchitectureRegions = []string{
- "ap-south-2",
- "ap-southeast-4",
- "eu-central-2",
- "eu-south-2",
- "me-central-1",
- "il-central-1",
-}
+// Add regions that only support x86_64
+var singleArchitectureRegions = []string{}
// getLayerVersion returns the latest version of a layer in a region
func getLayerVersion(ctx context.Context, layerName string, region string) (int64, error) {
@@ -100,7 +95,7 @@ func getLayerVersion(ctx context.Context, layerName string, region string) (int6
}
if len(layerVersionsResult.LayerVersions) == 0 {
- return 0, fmt.Errorf("no layer meets the search criteria %s - %s", layerName, region)
+ return 0, nil
}
return layerVersionsResult.LayerVersions[0].Version, nil
}
diff --git a/mkdocs.yml b/mkdocs.yml
index 1b9f4545239..0a844fd392f 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -40,6 +40,24 @@ nav:
- Automation: automation.md
- Roadmap: roadmap.md
- Maintainers: maintainers.md
+ - Contributing:
+ # - contributing/index.md
+ - Development environment: contributing/setup.md
+ - Your first contribution: contributing/getting_started.md
+ - Conventions: contributing/conventions.md
+ # - Testing:
+ # - Unit tests: contributing/testing/unit_tests.md
+ # - Functional tests: contributing/testing/functional_tests.md
+ # - End-to-end tests: contributing/testing/e2e_tests.md
+ # - Documentation:
+ # - User guide: contributing/documentation/user_guide.md
+ # - API guide: contributing/documentation/api_guide.md
+ # - Tutorials: contributing/documentation/tutorials.md
+ # - Request For Comments (RFC): contributing/documentation/rfc.md
+ # - Tracks:
+ # - Overview: contributing/tracks/overview.md
+ # - Casual to regular contributor: contributing/tracks/casual_regular_contributor.md
+ # - Customer to advocate: contributing/tracks/customer_advocate.md
theme:
name: material
diff --git a/mypy.ini b/mypy.ini
index 2b50293b561..cb2d3ce2443 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -12,6 +12,12 @@ disable_error_code = annotation-unchecked
[mypy-jmespath]
ignore_missing_imports=True
+[mypy-aws_encryption_sdk]
+ignore_missing_imports=True
+
+[mypy-sentry_sdk]
+ignore_missing_imports=True
+
[mypy-jmespath.exceptions]
ignore_missing_imports=True
diff --git a/package-lock.json b/package-lock.json
index ab39ad28389..0669368cd96 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -11,13 +11,13 @@
"package-lock.json": "^1.0.0"
},
"devDependencies": {
- "aws-cdk": "^2.96.2"
+ "aws-cdk": "^2.100.0"
}
},
"node_modules/aws-cdk": {
- "version": "2.96.2",
- "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.96.2.tgz",
- "integrity": "sha512-13ERpPV99OFAD75PLOtl0rRMXTWn6bCrmUPwYKkLwIMkj2xWCBiwo2Y9Qg+UzEszm5NMHA1N4ichSvuZ0mt2IQ==",
+ "version": "2.100.0",
+ "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.100.0.tgz",
+ "integrity": "sha512-Gt/4wPuEiBYw2tl0+cN0EbLxxJEvltcJxSQAcVHgNbqvDj49KUJ/oCbZ335dF0gK/hrVVb70xfNiYbBSPOsmvg==",
"dev": true,
"bin": {
"cdk": "bin/cdk"
@@ -51,9 +51,9 @@
},
"dependencies": {
"aws-cdk": {
- "version": "2.96.2",
- "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.96.2.tgz",
- "integrity": "sha512-13ERpPV99OFAD75PLOtl0rRMXTWn6bCrmUPwYKkLwIMkj2xWCBiwo2Y9Qg+UzEszm5NMHA1N4ichSvuZ0mt2IQ==",
+ "version": "2.100.0",
+ "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.100.0.tgz",
+ "integrity": "sha512-Gt/4wPuEiBYw2tl0+cN0EbLxxJEvltcJxSQAcVHgNbqvDj49KUJ/oCbZ335dF0gK/hrVVb70xfNiYbBSPOsmvg==",
"dev": true,
"requires": {
"fsevents": "2.3.2"
diff --git a/package.json b/package.json
index 337cc97184a..6cf535b8d72 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
"name": "aws-lambda-powertools-python-e2e",
"version": "1.0.0",
"devDependencies": {
- "aws-cdk": "^2.96.2"
+ "aws-cdk": "^2.100.0"
},
"dependencies": {
"package-lock.json": "^1.0.0"
diff --git a/poetry.lock b/poetry.lock
index d379e782470..d86fa9e3259 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -93,69 +93,69 @@ typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-cdk-aws-apigatewayv2-alpha"
-version = "2.93.0a0"
+version = "2.100.0a0"
description = "The CDK Construct Library for AWS::APIGatewayv2"
optional = false
python-versions = "~=3.7"
files = [
- {file = "aws-cdk.aws-apigatewayv2-alpha-2.93.0a0.tar.gz", hash = "sha256:67b5c1cb5a3405f321a25da185ef949460793d9b33313f13544106bed2ce2180"},
- {file = "aws_cdk.aws_apigatewayv2_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:962d52fdfbc922f104381943d2edb0d535f1d793fd73f4518fb25fb7d63041f4"},
+ {file = "aws-cdk.aws-apigatewayv2-alpha-2.100.0a0.tar.gz", hash = "sha256:80ae7d1ca69d009bf1d1eac664937452faff46ab5a450703d1598a434ec1b0b6"},
+ {file = "aws_cdk.aws_apigatewayv2_alpha-2.100.0a0-py3-none-any.whl", hash = "sha256:5693a062808c16eac66c4b3a61f338ab16226455c254c93cce477c05da1cd11d"},
]
[package.dependencies]
-aws-cdk-lib = "2.93.0"
+aws-cdk-lib = ">=2.100.0,<3.0.0"
constructs = ">=10.0.0,<11.0.0"
-jsii = ">=1.87.0,<2.0.0"
+jsii = ">=1.89.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-cdk-aws-apigatewayv2-authorizers-alpha"
-version = "2.93.0a0"
+version = "2.100.0a0"
description = "Authorizers for AWS APIGateway V2"
optional = false
python-versions = "~=3.7"
files = [
- {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.93.0a0.tar.gz", hash = "sha256:495969d05ca85942bc3da6fac7d0a6df5893265b644921d9e891441ee845fdfd"},
- {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:6b22e4d94afa481c94fcafdc62c2cf22ea08ea0d985e738569b39da4ba4ffbb0"},
+ {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.100.0a0.tar.gz", hash = "sha256:9bb30a2b1cd7af0da1b2259105943f1eb27b3b9db74fabf9eaca2f23a589c940"},
+ {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.100.0a0-py3-none-any.whl", hash = "sha256:c9ecfccb68205fb9e6a5002d797c8dada76a4251621c4cb14232c9a1ea871915"},
]
[package.dependencies]
-"aws-cdk.aws-apigatewayv2-alpha" = "2.93.0.a0"
-aws-cdk-lib = "2.93.0"
+"aws-cdk.aws-apigatewayv2-alpha" = "2.100.0.a0"
+aws-cdk-lib = ">=2.100.0,<3.0.0"
constructs = ">=10.0.0,<11.0.0"
-jsii = ">=1.87.0,<2.0.0"
+jsii = ">=1.89.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-cdk-aws-apigatewayv2-integrations-alpha"
-version = "2.93.0a0"
+version = "2.100.0a0"
description = "Integrations for AWS APIGateway V2"
optional = false
python-versions = "~=3.7"
files = [
- {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.93.0a0.tar.gz", hash = "sha256:4c581f67634fab19b11025751e3ee825f055ee9d1bc77d9cbc5009f261456e62"},
- {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:48479656dca9e446ae625e5936ddd940863bd478eb86cdd62889c6b5fee9f751"},
+ {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.100.0a0.tar.gz", hash = "sha256:a5d2e0ede184e7f375fd11842c7c25a4aeb0844bf0a73d73fee0d4efdc8cdaa2"},
+ {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.100.0a0-py3-none-any.whl", hash = "sha256:162607adc03602b7babedbeac4de0b8ba51d58ac28996a5aa45872ad2eccfa1e"},
]
[package.dependencies]
-"aws-cdk.aws-apigatewayv2-alpha" = "2.93.0.a0"
-aws-cdk-lib = "2.93.0"
+"aws-cdk.aws-apigatewayv2-alpha" = "2.100.0.a0"
+aws-cdk-lib = ">=2.100.0,<3.0.0"
constructs = ">=10.0.0,<11.0.0"
-jsii = ">=1.87.0,<2.0.0"
+jsii = ">=1.89.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-cdk-lib"
-version = "2.93.0"
+version = "2.100.0"
description = "Version 2 of the AWS Cloud Development Kit library"
optional = false
python-versions = "~=3.7"
files = [
- {file = "aws-cdk-lib-2.93.0.tar.gz", hash = "sha256:54252c8df547d2bd83584278529f47506fa2c27adcbfa623f00322b685f24c18"},
- {file = "aws_cdk_lib-2.93.0-py3-none-any.whl", hash = "sha256:063e7c1f2588a254766229130347fb60e0bd7dd2a6d222d3ae2aa145a6059554"},
+ {file = "aws-cdk-lib-2.100.0.tar.gz", hash = "sha256:39d6ab3ffcd87b60a541b4714f0ecefd169055a647376521728d64786393680a"},
+ {file = "aws_cdk_lib-2.100.0-py3-none-any.whl", hash = "sha256:8c43fe03e1e9e7dc9a4f8003d1c4e04c5ac5fe2ec4a44928695785dff6d086ee"},
]
[package.dependencies]
@@ -163,10 +163,27 @@ files = [
"aws-cdk.asset-kubectl-v20" = ">=2.1.2,<3.0.0"
"aws-cdk.asset-node-proxy-agent-v6" = ">=2.0.1,<3.0.0"
constructs = ">=10.0.0,<11.0.0"
-jsii = ">=1.87.0,<2.0.0"
+jsii = ">=1.89.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
+[[package]]
+name = "aws-encryption-sdk"
+version = "3.1.1"
+description = "AWS Encryption SDK implementation for Python"
+optional = true
+python-versions = "*"
+files = [
+ {file = "aws-encryption-sdk-3.1.1.tar.gz", hash = "sha256:8d5fbf018fc68d6b1cacbe4dd037fd805296c7736a9fe457eb684d053f7f9563"},
+ {file = "aws_encryption_sdk-3.1.1-py2.py3-none-any.whl", hash = "sha256:a3cbbf04e0b9038b9180af8b03da896af19083e00ca011dcfcb403421458ad02"},
+]
+
+[package.dependencies]
+attrs = ">=17.4.0"
+boto3 = ">=1.10.0"
+cryptography = ">=2.5.0"
+wrapt = ">=1.10.11"
+
[[package]]
name = "aws-requests-auth"
version = "0.4.3"
@@ -183,33 +200,33 @@ requests = ">=0.14.0"
[[package]]
name = "aws-sam-translator"
-version = "1.73.0"
+version = "1.77.0"
description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates"
optional = false
python-versions = ">=3.7, <=4.0, !=4.0"
files = [
- {file = "aws-sam-translator-1.73.0.tar.gz", hash = "sha256:bfa7cad3a78f002edeec5e39fd61b616cf84f34f61010c5dc2f7a76845fe7a02"},
- {file = "aws_sam_translator-1.73.0-py3-none-any.whl", hash = "sha256:c0132b065d743773fcd2573ed1ae60e0129fa46043fad76430261b098a811924"},
+ {file = "aws-sam-translator-1.77.0.tar.gz", hash = "sha256:fd6ddd8fef93f0120d8acf2239423bb72909e39e21c8afd70e6b908b07068612"},
+ {file = "aws_sam_translator-1.77.0-py3-none-any.whl", hash = "sha256:987513f653d3ca5cb8ce073653a1d66c80ca25a12d28600e763d22b8ebc82144"},
]
[package.dependencies]
boto3 = ">=1.19.5,<2.dev0"
jsonschema = ">=3.2,<5"
-pydantic = ">=1.8,<2.0"
+pydantic = ">=1.8,<3"
typing-extensions = ">=4.4,<5"
[package.extras]
-dev = ["black (==23.1.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.1.0,<1.2.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.263)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"]
+dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"]
[[package]]
name = "aws-xray-sdk"
-version = "2.12.0"
+version = "2.12.1"
description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service."
optional = true
-python-versions = "*"
+python-versions = ">=3.7"
files = [
- {file = "aws-xray-sdk-2.12.0.tar.gz", hash = "sha256:295afc237073a80956d7d4f27c31830edcb9a8ccca9ef8aa44990badef15e5b7"},
- {file = "aws_xray_sdk-2.12.0-py2.py3-none-any.whl", hash = "sha256:30886e23cc2daadc1c06a76f25b071205e84848419d1ddf097b62a565e156542"},
+ {file = "aws-xray-sdk-2.12.1.tar.gz", hash = "sha256:0bbfdbc773cfef4061062ac940b85e408297a2242f120bcdfee2593209b1e432"},
+ {file = "aws_xray_sdk-2.12.1-py2.py3-none-any.whl", hash = "sha256:f6803832dc08d18cc265e2327a69bfa9ee41c121fac195edc9745d04b7a566c3"},
]
[package.dependencies]
@@ -218,18 +235,21 @@ wrapt = "*"
[[package]]
name = "babel"
-version = "2.12.1"
+version = "2.13.0"
description = "Internationalization utilities"
optional = false
python-versions = ">=3.7"
files = [
- {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"},
- {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"},
+ {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"},
+ {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"},
]
[package.dependencies]
pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
+
[[package]]
name = "bandit"
version = "1.7.5"
@@ -305,38 +325,41 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "boto3"
-version = "1.28.35"
+version = "1.28.62"
description = "The AWS SDK for Python"
optional = false
python-versions = ">= 3.7"
files = [
- {file = "boto3-1.28.35-py3-none-any.whl", hash = "sha256:d77415f22bbc14f3d72eaed2fc9f96d161f3ba7686922ad26d6bbc9d4985f3df"},
- {file = "boto3-1.28.35.tar.gz", hash = "sha256:580b584e36967155abed7cc9b088b3bd784e8242ae4d8841f58cb50ab05520dc"},
+ {file = "boto3-1.28.62-py3-none-any.whl", hash = "sha256:0dfa2fc96ccafce4feb23044d6cba8b25075ad428a0c450d369d099c6a1059d2"},
+ {file = "boto3-1.28.62.tar.gz", hash = "sha256:148eeba0f1867b3db5b3e5ae2997d75a94d03fad46171374a0819168c36f7ed0"},
]
[package.dependencies]
-botocore = ">=1.31.35,<1.32.0"
+botocore = ">=1.31.62,<1.32.0"
jmespath = ">=0.7.1,<2.0.0"
-s3transfer = ">=0.6.0,<0.7.0"
+s3transfer = ">=0.7.0,<0.8.0"
[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.31.35"
+version = "1.31.62"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">= 3.7"
files = [
- {file = "botocore-1.31.35-py3-none-any.whl", hash = "sha256:943e1465aad66db4933b06809134bd08c5b05e8eb18c19742ffec82f54769457"},
- {file = "botocore-1.31.35.tar.gz", hash = "sha256:7e4534325262f43293a9cc9937cb3f1711365244ffde8b925a6ee862bcf30a83"},
+ {file = "botocore-1.31.62-py3-none-any.whl", hash = "sha256:be792d806afc064694a2d0b9b25779f3ca0c1584b29a35ac32e67f0064ddb8b7"},
+ {file = "botocore-1.31.62.tar.gz", hash = "sha256:272b78ac65256b6294cb9cdb0ac484d447ad3a85642e33cb6a3b1b8afee15a4c"},
]
[package.dependencies]
jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
-urllib3 = ">=1.25.4,<1.27"
+urllib3 = [
+ {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""},
+ {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""},
+]
[package.extras]
crt = ["awscrt (==0.16.26)"]
@@ -354,13 +377,13 @@ files = [
[[package]]
name = "bytecode"
-version = "0.14.2"
+version = "0.15.0"
description = "Python module to generate and modify bytecode"
optional = false
python-versions = ">=3.8"
files = [
- {file = "bytecode-0.14.2-py3-none-any.whl", hash = "sha256:e368a2b9bbd7c986133c951250db94fb32f774cfc49752a9db9073bcf9899762"},
- {file = "bytecode-0.14.2.tar.gz", hash = "sha256:386378d9025d68ddb144870ae74330a492717b11b8c9164c4034e88add808f0c"},
+ {file = "bytecode-0.15.0-py3-none-any.whl", hash = "sha256:a66718dc1d246b4fec52b5850c15592344a56c8bdb28fd243c895ccf00f8371f"},
+ {file = "bytecode-0.15.0.tar.gz", hash = "sha256:0908a8348cabf366b5c1865daabcdc0d650cb0cbdeb1750cc90564852f81945c"},
]
[package.dependencies]
@@ -402,19 +425,95 @@ files = [
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
]
+[[package]]
+name = "cffi"
+version = "1.15.1"
+description = "Foreign Function Interface for Python calling C code."
+optional = true
+python-versions = "*"
+files = [
+ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
+ {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
+ {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
+ {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
+ {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
+ {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
+ {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
+ {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
+ {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
+ {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
+ {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
+ {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
+ {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
+ {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
+ {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
+ {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
+ {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
+ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
+ {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
[[package]]
name = "cfn-lint"
-version = "0.80.2"
+version = "0.81.0"
description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved"
optional = false
python-versions = ">=3.7, <=4.0, !=4.0"
files = [
- {file = "cfn-lint-0.80.2.tar.gz", hash = "sha256:3da65b097fe9be335d3d6a797797d8af3e304f449d48df354a6eff8f005d0bb9"},
- {file = "cfn_lint-0.80.2-py3-none-any.whl", hash = "sha256:efd67cd7691691e91165ddd4631b351cffc8440fee2713e7f713b105d9c1958a"},
+ {file = "cfn-lint-0.81.0.tar.gz", hash = "sha256:532cbfe076fa8b68e70ec67743e9086169ef6d15be3306cae8aa57b38994fd8f"},
+ {file = "cfn_lint-0.81.0-py3-none-any.whl", hash = "sha256:8f34392412ffe6213aa0897d469133123c2d8ad576a9cdafbc1f7b630f98d597"},
]
[package.dependencies]
-aws-sam-translator = ">=1.73.0"
+aws-sam-translator = ">=1.75.0"
jschema-to-python = ">=1.2.3,<1.3.0"
jsonpatch = "*"
jsonschema = ">=3.0,<5"
@@ -427,86 +526,101 @@ sympy = ">=1.0.0"
[[package]]
name = "charset-normalizer"
-version = "3.2.0"
+version = "3.3.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
- {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
- {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"},
- {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
- {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
- {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
- {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
+ {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"},
+ {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"},
+ {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"},
+ {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"},
+ {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"},
+ {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"},
+ {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"},
+ {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"},
]
[[package]]
@@ -548,17 +662,17 @@ files = [
[[package]]
name = "constructs"
-version = "10.2.69"
+version = "10.3.0"
description = "A programming model for software-defined state"
optional = false
python-versions = "~=3.7"
files = [
- {file = "constructs-10.2.69-py3-none-any.whl", hash = "sha256:27a60f5ce4faa4d43c91c73f24e1a245c0a1ef67ea1c8a3df9ca6af9adf618df"},
- {file = "constructs-10.2.69.tar.gz", hash = "sha256:520ddd665cc336df90be06bb1bd49f3a9a7400d886cad8aef7b0155593b4ffa4"},
+ {file = "constructs-10.3.0-py3-none-any.whl", hash = "sha256:2972f514837565ff5b09171cfba50c0159dfa75ee86a42921ea8c86f2941b3d2"},
+ {file = "constructs-10.3.0.tar.gz", hash = "sha256:518551135ec236f9cc6b86500f4fbbe83b803ccdc6c2cb7684e0b7c4d234e7b1"},
]
[package.dependencies]
-jsii = ">=1.84.0,<2.0.0"
+jsii = ">=1.90.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
@@ -637,15 +751,60 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
[package.extras]
toml = ["tomli"]
+[[package]]
+name = "cryptography"
+version = "41.0.4"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"},
+ {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"},
+ {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"},
+ {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"},
+ {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"},
+ {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"},
+ {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"},
+ {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"},
+ {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"},
+ {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"},
+ {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"},
+ {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"},
+ {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"},
+ {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"},
+ {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"},
+ {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"},
+ {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"},
+ {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"},
+ {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"},
+ {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"},
+ {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"},
+ {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"},
+ {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"},
+]
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
+nox = ["nox"]
+pep8test = ["black", "check-sdist", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
[[package]]
name = "datadog"
-version = "0.46.0"
+version = "0.47.0"
description = "The Datadog Python library"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
- {file = "datadog-0.46.0-py2.py3-none-any.whl", hash = "sha256:3d7bcda6177b43be4cdb52e16b4bdd4f9005716c0dd7cfea009e018c36bb7a3d"},
- {file = "datadog-0.46.0.tar.gz", hash = "sha256:e4fbc92a85e2b0919a226896ae45fc5e4b356c0c57f1c2659659dfbe0789c674"},
+ {file = "datadog-0.47.0-py2.py3-none-any.whl", hash = "sha256:a45ec997ab554208837e8c44d81d0e1456539dc14da5743687250e028bc809b7"},
+ {file = "datadog-0.47.0.tar.gz", hash = "sha256:47be3b2c3d709a7f5b709eb126ed4fe6cc7977d618fe5c158dd89c2a9f7d9916"},
]
[package.dependencies]
@@ -653,13 +812,13 @@ requests = ">=2.6.0"
[[package]]
name = "datadog-lambda"
-version = "4.78.0"
+version = "4.80.0"
description = "The Datadog AWS Lambda Library"
optional = false
python-versions = ">=3.7.0,<4"
files = [
- {file = "datadog_lambda-4.78.0-py3-none-any.whl", hash = "sha256:660bae6057f3b2033b0c035e9d542af491e40f9ce57b97b4891c491262b9148c"},
- {file = "datadog_lambda-4.78.0.tar.gz", hash = "sha256:3e57faa8f80ddd43b595355b92045fde8f9ed87efe8619133e82cebb87cbe434"},
+ {file = "datadog_lambda-4.80.0-py3-none-any.whl", hash = "sha256:506b8964567230d87e2bfd323420854d37b4d7c7a9bfab7e192389f9b4c8150c"},
+ {file = "datadog_lambda-4.80.0.tar.gz", hash = "sha256:ddd3ed20592df97523ae26ba552b69de239520c37e31804ca9949b010f90b461"},
]
[package.dependencies]
@@ -857,13 +1016,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]]
name = "fastjsonschema"
-version = "2.18.0"
+version = "2.18.1"
description = "Fastest Python implementation of JSON schema"
optional = true
python-versions = "*"
files = [
- {file = "fastjsonschema-2.18.0-py3-none-any.whl", hash = "sha256:128039912a11a807068a7c87d0da36660afbfd7202780db26c4aa7153cfdc799"},
- {file = "fastjsonschema-2.18.0.tar.gz", hash = "sha256:e820349dd16f806e4bd1467a138dced9def4bc7d6213a34295272a6cac95b5bd"},
+ {file = "fastjsonschema-2.18.1-py3-none-any.whl", hash = "sha256:aec6a19e9f66e9810ab371cc913ad5f4e9e479b63a7072a2cd060a9369e329a8"},
+ {file = "fastjsonschema-2.18.1.tar.gz", hash = "sha256:06dc8680d937628e993fa0cd278f196d20449a1adc087640710846b324d422ea"},
]
[package.extras]
@@ -917,19 +1076,22 @@ smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
-version = "3.1.35"
+version = "3.1.37"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
python-versions = ">=3.7"
files = [
- {file = "GitPython-3.1.35-py3-none-any.whl", hash = "sha256:c19b4292d7a1d3c0f653858db273ff8a6614100d1eb1528b014ec97286193c09"},
- {file = "GitPython-3.1.35.tar.gz", hash = "sha256:9cbefbd1789a5fe9bcf621bb34d3f441f3a90c8461d377f84eda73e721d9b06b"},
+ {file = "GitPython-3.1.37-py3-none-any.whl", hash = "sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33"},
+ {file = "GitPython-3.1.37.tar.gz", hash = "sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54"},
]
[package.dependencies]
gitdb = ">=4.0.1,<5"
typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""}
+[package.extras]
+test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"]
+
[[package]]
name = "h11"
version = "0.14.0"
@@ -1043,6 +1205,17 @@ files = [
{file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"},
{file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"},
{file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"},
+ {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"},
+ {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"},
+ {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"},
+ {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"},
+ {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"},
+ {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"},
+ {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"},
+ {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"},
+ {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"},
+ {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"},
+ {file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"},
{file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"},
{file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"},
{file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"},
@@ -1213,13 +1386,13 @@ pbr = "*"
[[package]]
name = "jsii"
-version = "1.88.0"
+version = "1.90.0"
description = "Python client for jsii runtime"
optional = false
python-versions = "~=3.7"
files = [
- {file = "jsii-1.88.0-py3-none-any.whl", hash = "sha256:b3888141c30b83a30bfbe03a877bbf8ae42f957b6ccca02bae448853debffaf8"},
- {file = "jsii-1.88.0.tar.gz", hash = "sha256:a59e0f962589dcc741d2bcf2a7b4c4a927a29d3f9a2804a192c734e2e3275018"},
+ {file = "jsii-1.90.0-py3-none-any.whl", hash = "sha256:e8a9a94c5116da96f11e79f16d4a290e1e7e1652b4addb8cce5c56f8ef570479"},
+ {file = "jsii-1.90.0.tar.gz", hash = "sha256:2fcc68d8cf88260bc8e502789d43ab46e7672b6f82d498ed62a52a4366fbccc5"},
]
[package.dependencies]
@@ -1419,16 +1592,6 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -1506,13 +1669,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"]
[[package]]
name = "mkdocs"
-version = "1.5.2"
+version = "1.5.3"
description = "Project documentation with Markdown."
optional = false
python-versions = ">=3.7"
files = [
- {file = "mkdocs-1.5.2-py3-none-any.whl", hash = "sha256:60a62538519c2e96fe8426654a67ee177350451616118a41596ae7c876bb7eac"},
- {file = "mkdocs-1.5.2.tar.gz", hash = "sha256:70d0da09c26cff288852471be03c23f0f521fc15cf16ac89c7a3bfb9ae8d24f9"},
+ {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"},
+ {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"},
]
[package.dependencies]
@@ -1577,13 +1740,13 @@ requests = ">=2.26,<3.0"
[[package]]
name = "mkdocs-material-extensions"
-version = "1.1.1"
+version = "1.2"
description = "Extension pack for Python Markdown and MkDocs Material."
optional = false
python-versions = ">=3.7"
files = [
- {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"},
- {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"},
+ {file = "mkdocs_material_extensions-1.2-py3-none-any.whl", hash = "sha256:c767bd6d6305f6420a50f0b541b0c9966d52068839af97029be14443849fb8a1"},
+ {file = "mkdocs_material_extensions-1.2.tar.gz", hash = "sha256:27e2d1ed2d031426a6e10d5ea06989d67e90bb02acd588bc5673106b5ee5eedf"},
]
[[package]]
@@ -1652,13 +1815,13 @@ reports = ["lxml"]
[[package]]
name = "mypy-boto3-appconfig"
-version = "1.28.52"
-description = "Type annotations for boto3.AppConfig 1.28.52 service generated with mypy-boto3-builder 7.19.0"
+version = "1.28.60"
+description = "Type annotations for boto3.AppConfig 1.28.60 service generated with mypy-boto3-builder 7.19.0"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-boto3-appconfig-1.28.52.tar.gz", hash = "sha256:3a31c569394cb28edd6d40b235f3ad6c8829450626566ee7d0fa1466339a7f5f"},
- {file = "mypy_boto3_appconfig-1.28.52-py3-none-any.whl", hash = "sha256:3335d41af596cac5b63fc3551b79531fea8f904fc91fa27a69a21fdd5c0275fe"},
+ {file = "mypy-boto3-appconfig-1.28.60.tar.gz", hash = "sha256:50ac377c8f6233a29f233d3c7af40094b0a7ffdf5e0fb56696a91328f21954d6"},
+ {file = "mypy_boto3_appconfig-1.28.60-py3-none-any.whl", hash = "sha256:49fd889cd3565b3d0c6bfe36c2bb68f76a50f2ee84e3e88507cf154d04834adf"},
]
[package.dependencies]
@@ -1708,13 +1871,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-dynamodb"
-version = "1.28.36"
-description = "Type annotations for boto3.DynamoDB 1.28.36 service generated with mypy-boto3-builder 7.18.0"
+version = "1.28.55"
+description = "Type annotations for boto3.DynamoDB 1.28.55 service generated with mypy-boto3-builder 7.19.0"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-boto3-dynamodb-1.28.36.tar.gz", hash = "sha256:5fe1d336fdc8c58f345c9c1b4e4c1a2d164660531cf3a074d4598975fb2687de"},
- {file = "mypy_boto3_dynamodb-1.28.36-py3-none-any.whl", hash = "sha256:9a3b49385d17e421661ab8639fc09cc64a706198be20287f82d83511289294a3"},
+ {file = "mypy-boto3-dynamodb-1.28.55.tar.gz", hash = "sha256:a3039f8ada07a218f97f0c70a82ed9cf461a0cb5133194fcf1e0e87b15c899a5"},
+ {file = "mypy_boto3_dynamodb-1.28.55-py3-none-any.whl", hash = "sha256:c4c16a00e90db5857cbeee207f6dec954ca142bd52e2de0f3d52be6d50d83d16"},
]
[package.dependencies]
@@ -1722,13 +1885,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-lambda"
-version = "1.28.36"
-description = "Type annotations for boto3.Lambda 1.28.36 service generated with mypy-boto3-builder 7.18.0"
+version = "1.28.63"
+description = "Type annotations for boto3.Lambda 1.28.63 service generated with mypy-boto3-builder 7.19.0"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-boto3-lambda-1.28.36.tar.gz", hash = "sha256:70498e6ff6bfd60b758553d27fadf691ba169572faca01c2bd457da0b48b9cff"},
- {file = "mypy_boto3_lambda-1.28.36-py3-none-any.whl", hash = "sha256:edb1f49279f7713929a70eaab00cf3d4ba65a10016db636805d022b2eaf14c84"},
+ {file = "mypy-boto3-lambda-1.28.63.tar.gz", hash = "sha256:7cbbee5560f347548a8f43324b31b2abfa1f56ec7380f20dadb837533fc0552a"},
+ {file = "mypy_boto3_lambda-1.28.63-py3-none-any.whl", hash = "sha256:bcfc747594704664d41fb904f59e4173c718d1bffc92555fc9ca57f8c4b1b970"},
]
[package.dependencies]
@@ -1750,13 +1913,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-s3"
-version = "1.28.52"
-description = "Type annotations for boto3.S3 1.28.52 service generated with mypy-boto3-builder 7.19.0"
+version = "1.28.55"
+description = "Type annotations for boto3.S3 1.28.55 service generated with mypy-boto3-builder 7.19.0"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-boto3-s3-1.28.52.tar.gz", hash = "sha256:179cb7542cc5ef656f1323ad51eb237afcba77d1e5ed07d21a013fe36effb8b2"},
- {file = "mypy_boto3_s3-1.28.52-py3-none-any.whl", hash = "sha256:a75cd5ff28f1cb5109dd50db94259436701208fa97c61b5a2cc0689e169b7cba"},
+ {file = "mypy-boto3-s3-1.28.55.tar.gz", hash = "sha256:b008809f448e74075012d4fc54b0176de0b4f49bc38e39de30ca0e764eb75056"},
+ {file = "mypy_boto3_s3-1.28.55-py3-none-any.whl", hash = "sha256:11a3db97398973d4ae28489b94c010778a0a5c65f99e00268456c3fea67eca79"},
]
[package.dependencies]
@@ -1778,13 +1941,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-ssm"
-version = "1.28.36"
-description = "Type annotations for boto3.SSM 1.28.36 service generated with mypy-boto3-builder 7.18.0"
+version = "1.28.54"
+description = "Type annotations for boto3.SSM 1.28.54 service generated with mypy-boto3-builder 7.19.0"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-boto3-ssm-1.28.36.tar.gz", hash = "sha256:06de6f9ba9fcdda7af12a4f90d3fff965642d9a514731b77a5d1a9308e546643"},
- {file = "mypy_boto3_ssm-1.28.36-py3-none-any.whl", hash = "sha256:b578f07ba6c86916e7be759eff344e0549557e22a3207d548d579c7df58d3fd4"},
+ {file = "mypy-boto3-ssm-1.28.54.tar.gz", hash = "sha256:71fdb8e90cbb7e1560bd4eccf70dfc264aac7564266b8188f9898d369209b49f"},
+ {file = "mypy_boto3_ssm-1.28.54-py3-none-any.whl", hash = "sha256:9fc591e88461f5b9ae27c7e94b413634e9fa9d67e436e563a76563b2c849cb39"},
]
[package.dependencies]
@@ -1835,13 +1998,13 @@ test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"]
[[package]]
name = "opentelemetry-api"
-version = "1.19.0"
+version = "1.20.0"
description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.7"
files = [
- {file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"},
- {file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"},
+ {file = "opentelemetry_api-1.20.0-py3-none-any.whl", hash = "sha256:982b76036fec0fdaf490ae3dfd9f28c81442a33414f737abc687a32758cdcba5"},
+ {file = "opentelemetry_api-1.20.0.tar.gz", hash = "sha256:06abe351db7572f8afdd0fb889ce53f3c992dbf6f6262507b385cc1963e06983"},
]
[package.dependencies]
@@ -1850,13 +2013,13 @@ importlib-metadata = ">=6.0,<7.0"
[[package]]
name = "packaging"
-version = "23.1"
+version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
- {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
@@ -1919,13 +2082,13 @@ files = [
[[package]]
name = "platformdirs"
-version = "3.10.0"
+version = "3.11.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.7"
files = [
- {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"},
- {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"},
+ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
+ {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
]
[package.dependencies]
@@ -1955,24 +2118,24 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "protobuf"
-version = "4.24.2"
+version = "4.24.4"
description = ""
optional = false
python-versions = ">=3.7"
files = [
- {file = "protobuf-4.24.2-cp310-abi3-win32.whl", hash = "sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924"},
- {file = "protobuf-4.24.2-cp310-abi3-win_amd64.whl", hash = "sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3"},
- {file = "protobuf-4.24.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880"},
- {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c"},
- {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74"},
- {file = "protobuf-4.24.2-cp37-cp37m-win32.whl", hash = "sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1"},
- {file = "protobuf-4.24.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099"},
- {file = "protobuf-4.24.2-cp38-cp38-win32.whl", hash = "sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e"},
- {file = "protobuf-4.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16"},
- {file = "protobuf-4.24.2-cp39-cp39-win32.whl", hash = "sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b"},
- {file = "protobuf-4.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd"},
- {file = "protobuf-4.24.2-py3-none-any.whl", hash = "sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e"},
- {file = "protobuf-4.24.2.tar.gz", hash = "sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e"},
+ {file = "protobuf-4.24.4-cp310-abi3-win32.whl", hash = "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb"},
+ {file = "protobuf-4.24.4-cp310-abi3-win_amd64.whl", hash = "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085"},
+ {file = "protobuf-4.24.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4"},
+ {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e"},
+ {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9"},
+ {file = "protobuf-4.24.4-cp37-cp37m-win32.whl", hash = "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46"},
+ {file = "protobuf-4.24.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37"},
+ {file = "protobuf-4.24.4-cp38-cp38-win32.whl", hash = "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe"},
+ {file = "protobuf-4.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b"},
+ {file = "protobuf-4.24.4-cp39-cp39-win32.whl", hash = "sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd"},
+ {file = "protobuf-4.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b"},
+ {file = "protobuf-4.24.4-py3-none-any.whl", hash = "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92"},
+ {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"},
]
[[package]]
@@ -1997,49 +2160,60 @@ files = [
{file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"},
]
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+optional = true
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
+
[[package]]
name = "pydantic"
-version = "1.10.12"
+version = "1.10.13"
description = "Data validation and settings management using python type hints"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"},
- {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"},
- {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"},
- {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"},
- {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"},
- {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"},
- {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"},
- {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"},
- {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"},
- {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"},
- {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"},
- {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"},
- {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"},
- {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"},
- {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"},
- {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"},
- {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"},
- {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"},
- {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"},
- {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"},
- {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"},
- {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"},
- {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"},
- {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"},
- {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"},
- {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"},
- {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"},
- {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"},
- {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"},
- {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"},
- {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"},
- {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"},
- {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"},
- {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"},
- {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"},
- {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"},
+ {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"},
+ {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"},
+ {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"},
+ {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"},
+ {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"},
+ {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"},
+ {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"},
+ {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"},
+ {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"},
+ {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"},
+ {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"},
+ {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"},
+ {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"},
+ {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"},
+ {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"},
+ {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"},
+ {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"},
+ {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"},
+ {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"},
+ {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"},
+ {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"},
+ {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"},
+ {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"},
+ {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"},
+ {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"},
+ {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"},
+ {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"},
+ {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"},
+ {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"},
+ {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"},
+ {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"},
+ {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"},
+ {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"},
+ {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"},
+ {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"},
+ {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"},
]
[package.dependencies]
@@ -2065,12 +2239,13 @@ plugins = ["importlib-metadata"]
[[package]]
name = "pyhcl"
-version = "0.4.4"
+version = "0.4.5"
description = "HCL configuration parser for python"
optional = false
python-versions = "*"
files = [
- {file = "pyhcl-0.4.4.tar.gz", hash = "sha256:2d9b9dcdf1023d812bfed561ba72c99104c5b3f52e558d595130a44ce081b003"},
+ {file = "pyhcl-0.4.5-py3-none-any.whl", hash = "sha256:30ee337d330d1f90c9f5ed8f49c468f66c8e6e43192bdc7c6ece1420beb3070c"},
+ {file = "pyhcl-0.4.5.tar.gz", hash = "sha256:c47293a51ccdd25e18bb5c8c0ab0ffe355b37c87f8d6f9d3280dc41efd4740bc"},
]
[[package]]
@@ -2224,6 +2399,20 @@ pytest = ">=5.0"
[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
+[[package]]
+name = "pytest-socket"
+version = "0.6.0"
+description = "Pytest Plugin to disable socket calls during tests"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "pytest_socket-0.6.0-py3-none-any.whl", hash = "sha256:cca72f134ff01e0023c402e78d31b32e68da3efdf3493bf7788f8eba86a6824c"},
+ {file = "pytest_socket-0.6.0.tar.gz", hash = "sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138"},
+]
+
+[package.dependencies]
+pytest = ">=3.6.3"
+
[[package]]
name = "pytest-xdist"
version = "3.3.1"
@@ -2260,13 +2449,13 @@ six = ">=1.5"
[[package]]
name = "pytz"
-version = "2023.3"
+version = "2023.3.post1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
- {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
- {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
+ {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
+ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
]
[[package]]
@@ -2281,7 +2470,6 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -2289,15 +2477,8 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -2314,7 +2495,6 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -2322,7 +2502,6 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -2493,13 +2672,13 @@ decorator = ">=3.4.2"
[[package]]
name = "rich"
-version = "13.5.2"
+version = "13.6.0"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"},
- {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"},
+ {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"},
+ {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"},
]
[package.dependencies]
@@ -2512,39 +2691,39 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "ruff"
-version = "0.0.290"
+version = "0.0.292"
description = "An extremely fast Python linter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.0.290-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:0e2b09ac4213b11a3520221083866a5816616f3ae9da123037b8ab275066fbac"},
- {file = "ruff-0.0.290-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4ca6285aa77b3d966be32c9a3cd531655b3d4a0171e1f9bf26d66d0372186767"},
- {file = "ruff-0.0.290-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35e3550d1d9f2157b0fcc77670f7bb59154f223bff281766e61bdd1dd854e0c5"},
- {file = "ruff-0.0.290-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d748c8bd97874f5751aed73e8dde379ce32d16338123d07c18b25c9a2796574a"},
- {file = "ruff-0.0.290-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:982af5ec67cecd099e2ef5e238650407fb40d56304910102d054c109f390bf3c"},
- {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bbd37352cea4ee007c48a44c9bc45a21f7ba70a57edfe46842e346651e2b995a"},
- {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d9be6351b7889462912e0b8185a260c0219c35dfd920fb490c7f256f1d8313e"},
- {file = "ruff-0.0.290-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75cdc7fe32dcf33b7cec306707552dda54632ac29402775b9e212a3c16aad5e6"},
- {file = "ruff-0.0.290-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb07f37f7aecdbbc91d759c0c09870ce0fb3eed4025eebedf9c4b98c69abd527"},
- {file = "ruff-0.0.290-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2ab41bc0ba359d3f715fc7b705bdeef19c0461351306b70a4e247f836b9350ed"},
- {file = "ruff-0.0.290-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:150bf8050214cea5b990945b66433bf9a5e0cef395c9bc0f50569e7de7540c86"},
- {file = "ruff-0.0.290-py3-none-musllinux_1_2_i686.whl", hash = "sha256:75386ebc15fe5467248c039f5bf6a0cfe7bfc619ffbb8cd62406cd8811815fca"},
- {file = "ruff-0.0.290-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ac93eadf07bc4ab4c48d8bb4e427bf0f58f3a9c578862eb85d99d704669f5da0"},
- {file = "ruff-0.0.290-py3-none-win32.whl", hash = "sha256:461fbd1fb9ca806d4e3d5c745a30e185f7cf3ca77293cdc17abb2f2a990ad3f7"},
- {file = "ruff-0.0.290-py3-none-win_amd64.whl", hash = "sha256:f1f49f5ec967fd5778813780b12a5650ab0ebcb9ddcca28d642c689b36920796"},
- {file = "ruff-0.0.290-py3-none-win_arm64.whl", hash = "sha256:ae5a92dfbdf1f0c689433c223f8dac0782c2b2584bd502dfdbc76475669f1ba1"},
- {file = "ruff-0.0.290.tar.gz", hash = "sha256:949fecbc5467bb11b8db810a7fa53c7e02633856ee6bd1302b2f43adcd71b88d"},
+ {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"},
+ {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"},
+ {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"},
+ {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"},
+ {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"},
+ {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"},
+ {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"},
+ {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"},
+ {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"},
+ {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"},
+ {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"},
+ {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"},
+ {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"},
+ {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"},
+ {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"},
+ {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"},
+ {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"},
]
[[package]]
name = "s3transfer"
-version = "0.6.2"
+version = "0.7.0"
description = "An Amazon S3 Transfer Manager"
optional = false
python-versions = ">= 3.7"
files = [
- {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"},
- {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"},
+ {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"},
+ {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"},
]
[package.dependencies]
@@ -2570,13 +2749,13 @@ pbr = "*"
[[package]]
name = "sentry-sdk"
-version = "1.31.0"
+version = "1.32.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = "*"
files = [
- {file = "sentry-sdk-1.31.0.tar.gz", hash = "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b"},
- {file = "sentry_sdk-1.31.0-py2.py3-none-any.whl", hash = "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291"},
+ {file = "sentry-sdk-1.32.0.tar.gz", hash = "sha256:935e8fbd7787a3702457393b74b13d89a5afb67185bc0af85c00cb27cbd42e7c"},
+ {file = "sentry_sdk-1.32.0-py2.py3-none-any.whl", hash = "sha256:eeb0b3550536f3bbc05bb1c7e0feb3a78d74acb43b607159a606ed2ec0a33a4d"},
]
[package.dependencies]
@@ -2626,13 +2805,13 @@ files = [
[[package]]
name = "smmap"
-version = "5.0.0"
+version = "5.0.1"
description = "A pure Python implementation of a sliding window memory map manager"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"},
- {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
+ {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"},
+ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
]
[[package]]
@@ -2764,13 +2943,13 @@ files = [
[[package]]
name = "types-requests"
-version = "2.31.0.3"
+version = "2.31.0.6"
description = "Typing stubs for requests"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
files = [
- {file = "types-requests-2.31.0.3.tar.gz", hash = "sha256:d5d7a08965fca12bedf716eaf5430c6e3d0da9f3164a1dba2a7f3885f9ebe3c0"},
- {file = "types_requests-2.31.0.3-py3-none-any.whl", hash = "sha256:938f51653c757716aeca5d72c405c5e2befad8b0d330e3b385ce7f148e1b10dc"},
+ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"},
+ {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"},
]
[package.dependencies]
@@ -2800,17 +2979,17 @@ files = [
[[package]]
name = "urllib3"
-version = "1.26.16"
+version = "1.26.17"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"},
- {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"},
+ {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"},
+ {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
@@ -2997,6 +3176,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
all = ["aws-xray-sdk", "fastjsonschema", "pydantic"]
aws-sdk = ["boto3"]
datadog = ["datadog-lambda"]
+datamasking-aws-sdk = ["aws-encryption-sdk"]
parser = ["pydantic"]
tracer = ["aws-xray-sdk"]
validation = ["fastjsonschema"]
@@ -3004,4 +3184,4 @@ validation = ["fastjsonschema"]
[metadata]
lock-version = "2.0"
python-versions = "^3.7.4"
-content-hash = "0cb9e818501c578e962f6007727f474f7133d27c52daf23fec9463e5890b2d21"
+content-hash = "1f1f3ba944e489e9e1799836b9850351529312111c8b7fe196cb1adddd9a9637"
diff --git a/pyproject.toml b/pyproject.toml
index d565e955595..639b4122b3f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,24 +1,34 @@
[tool.poetry]
name = "aws_lambda_powertools"
-version = "2.25.0"
+version = "2.26.0"
description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity."
authors = ["Amazon Web Services"]
include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]
-classifiers=[
- "Development Status :: 5 - Production/Stable",
- "Intended Audience :: Developers",
- "License :: OSI Approved :: MIT No Attribution License (MIT-0)",
- "Natural Language :: English",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT No Attribution License (MIT-0)",
+ "Natural Language :: English",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
]
repository = "https://github.com/aws-powertools/powertools-lambda-python"
documentation = "https://docs.powertools.aws.dev/lambda/python/"
readme = "README.md"
-keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "powertools", "feature_flags", "idempotency", "middleware"]
+keywords = [
+ "aws_lambda_powertools",
+ "aws",
+ "tracing",
+ "logging",
+ "lambda",
+ "powertools",
+ "feature_flags",
+ "idempotency",
+ "middleware",
+]
# MIT-0 is not recognized as an existing license from poetry.
# By using `MIT` as a license value, a `License :: OSI Approved :: MIT License` classifier is added to the classifiers list.
license = "MIT"
@@ -35,6 +45,7 @@ pydantic = { version = "^1.8.2", optional = true }
boto3 = { version = "^1.20.32", optional = true }
typing-extensions = "^4.6.2"
datadog-lambda = { version = "^4.77.0", optional = true }
+aws-encryption-sdk = { version = "^3.1.1", optional = true }
[tool.poetry.dev-dependencies]
coverage = {extras = ["toml"], version = "^7.2"}
@@ -57,15 +68,15 @@ aws-cdk-lib = "^2.88.0"
"aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0"
"aws-cdk.aws-apigatewayv2-authorizers-alpha" = "^2.38.1-alpha.0"
pytest-benchmark = "^4.0.0"
-mypy-boto3-appconfig = "^1.28.52"
+mypy-boto3-appconfig = "^1.28.60"
mypy-boto3-cloudformation = "^1.28.48"
mypy-boto3-cloudwatch = "^1.28.36"
-mypy-boto3-dynamodb = "^1.28.36"
-mypy-boto3-lambda = "^1.28.36"
+mypy-boto3-dynamodb = "^1.28.55"
+mypy-boto3-lambda = "^1.28.63"
mypy-boto3-logs = "^1.28.52"
mypy-boto3-secretsmanager = "^1.28.36"
-mypy-boto3-ssm = "^1.28.36"
-mypy-boto3-s3 = "^1.28.52"
+mypy-boto3-ssm = "^1.28.54"
+mypy-boto3-s3 = "^1.28.55"
mypy-boto3-xray = "^1.28.47"
types-requests = "^2.31.0"
typing-extensions = "^4.6.2"
@@ -86,20 +97,27 @@ tracer = ["aws-xray-sdk"]
all = ["pydantic", "aws-xray-sdk", "fastjsonschema"]
# allow customers to run code locally without emulators (SAM CLI, etc.)
aws-sdk = ["boto3"]
-datadog=["datadog-lambda"]
+datadog = ["datadog-lambda"]
+datamasking-aws-sdk = ["aws-encryption-sdk"]
[tool.poetry.group.dev.dependencies]
-cfn-lint = "0.80.2"
+cfn-lint = "0.81.0"
mypy = "^1.1.1"
types-python-dateutil = "^2.8.19.6"
httpx = ">=0.23.3,<0.25.0"
sentry-sdk = "^1.22.2"
-ruff = ">=0.0.272,<0.0.291"
+ruff = ">=0.0.272,<0.0.293"
retry2 = "^0.9.5"
+pytest-socket = "^0.6.0"
[tool.coverage.run]
source = ["aws_lambda_powertools"]
-omit = ["tests/*", "aws_lambda_powertools/exceptions/*", "aws_lambda_powertools/utilities/parser/types.py", "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py"]
+omit = [
+ "tests/*",
+ "aws_lambda_powertools/exceptions/*",
+ "aws_lambda_powertools/utilities/parser/types.py",
+ "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py",
+]
branch = true
[tool.coverage.html]
@@ -109,26 +127,26 @@ title = "Powertools for AWS Lambda (Python) Test Coverage"
[tool.coverage.report]
fail_under = 90
exclude_lines = [
- # Have to re-enable the standard pragma
- "pragma: no cover",
+ # Have to re-enable the standard pragma
+ "pragma: no cover",
- # Don't complain about missing debug-only code:
- "def __repr__",
- "if self.debug",
+ # Don't complain about missing debug-only code:
+ "def __repr__",
+ "if self.debug",
- # Don't complain if tests don't hit defensive assertion code:
- "raise AssertionError",
- "raise NotImplementedError",
+ # Don't complain if tests don't hit defensive assertion code:
+ "raise AssertionError",
+ "raise NotImplementedError",
- # Don't complain if non-runnable code isn't run:
- "if 0:",
- "if __name__ == .__main__.:",
+ # Don't complain if non-runnable code isn't run:
+ "if 0:",
+ "if __name__ == .__main__.:",
- # Ignore runtime type checking
- "if TYPE_CHECKING:",
+ # Ignore runtime type checking
+ "if TYPE_CHECKING:",
- # Ignore type function overload
- "@overload",
+ # Ignore type function overload
+ "@overload",
]
[tool.isort]
@@ -161,16 +179,16 @@ minversion = "6.0"
addopts = "-ra -vv"
testpaths = "./tests"
markers = [
- "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')",
+ "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')",
]
# MAINTENANCE: Remove these lines when drop support to Pydantic v1
-filterwarnings=[
+filterwarnings = [
"ignore:.*The `parse_obj` method is deprecated*:DeprecationWarning",
"ignore:.*The `parse_raw` method is deprecated*:DeprecationWarning",
"ignore:.*load_str_bytes is deprecated*:DeprecationWarning",
"ignore:.*The `dict` method is deprecated; use `model_dump` instead*:DeprecationWarning",
- "ignore:.*Pydantic V1 style `@validator` validators are deprecated*:DeprecationWarning"
+ "ignore:.*Pydantic V1 style `@validator` validators are deprecated*:DeprecationWarning",
]
[build-system]
diff --git a/tests/e2e/data_masking/__init__.py b/tests/e2e/data_masking/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/e2e/data_masking/conftest.py b/tests/e2e/data_masking/conftest.py
new file mode 100644
index 00000000000..f1892d7c0c9
--- /dev/null
+++ b/tests/e2e/data_masking/conftest.py
@@ -0,0 +1,19 @@
+import pytest
+
+from tests.e2e.data_masking.infrastructure import DataMaskingStack
+
+
+@pytest.fixture(autouse=True, scope="package")
+def infrastructure():
+ """Setup and teardown logic for E2E test infrastructure
+
+ Yields
+ ------
+ Dict[str, str]
+ CloudFormation Outputs from deployed infrastructure
+ """
+ stack = DataMaskingStack()
+ try:
+ yield stack.deploy()
+ finally:
+ stack.delete()
diff --git a/tests/e2e/data_masking/handlers/basic_handler.py b/tests/e2e/data_masking/handlers/basic_handler.py
new file mode 100644
index 00000000000..f31e822429a
--- /dev/null
+++ b/tests/e2e/data_masking/handlers/basic_handler.py
@@ -0,0 +1,23 @@
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.utilities._data_masking import DataMasking
+from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider
+
+logger = Logger()
+
+
+@logger.inject_lambda_context
+def lambda_handler(event, context):
+ # Generating logs for test_encryption_in_logs test
+ message, append_keys = event.get("message", ""), event.get("append_keys", {})
+ logger.append_keys(**append_keys)
+ logger.info(message)
+
+ # Encrypting data for test_encryption_in_handler test
+ kms_key = event.get("kms_key", "")
+ data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key]))
+ value = [1, 2, "string", 4.5]
+ encrypted_data = data_masker.encrypt(value)
+ response = {}
+ response["encrypted_data"] = encrypted_data
+
+ return response
diff --git a/tests/e2e/data_masking/infrastructure.py b/tests/e2e/data_masking/infrastructure.py
new file mode 100644
index 00000000000..ee18b272450
--- /dev/null
+++ b/tests/e2e/data_masking/infrastructure.py
@@ -0,0 +1,20 @@
+import aws_cdk.aws_kms as kms
+from aws_cdk import CfnOutput, Duration
+from aws_cdk import aws_iam as iam
+
+from tests.e2e.utils.infrastructure import BaseInfrastructure
+
+
+class DataMaskingStack(BaseInfrastructure):
+ def create_resources(self):
+ functions = self.create_lambda_functions(function_props={"timeout": Duration.seconds(10)})
+
+ key1 = kms.Key(self.stack, "MyKMSKey1", description="My KMS Key1")
+ CfnOutput(self.stack, "KMSKey1Arn", value=key1.key_arn, description="ARN of the created KMS Key1")
+
+ key2 = kms.Key(self.stack, "MyKMSKey2", description="My KMS Key2")
+ CfnOutput(self.stack, "KMSKey2Arn", value=key2.key_arn, description="ARN of the created KMS Key2")
+
+ functions["BasicHandler"].add_to_role_policy(
+ iam.PolicyStatement(effect=iam.Effect.ALLOW, actions=["kms:*"], resources=[key1.key_arn, key2.key_arn]),
+ )
diff --git a/tests/e2e/data_masking/test_e2e_data_masking.py b/tests/e2e/data_masking/test_e2e_data_masking.py
new file mode 100644
index 00000000000..80f45564177
--- /dev/null
+++ b/tests/e2e/data_masking/test_e2e_data_masking.py
@@ -0,0 +1,153 @@
+import json
+from uuid import uuid4
+
+import pytest
+from aws_encryption_sdk.exceptions import DecryptKeyError
+
+from aws_lambda_powertools.utilities._data_masking import DataMasking
+from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import (
+ AwsEncryptionSdkProvider,
+ ContextMismatchError,
+)
+from tests.e2e.utils import data_fetcher
+
+pytest.skip(reason="Data masking tests disabled until we go GA.", allow_module_level=True)
+
+
+@pytest.fixture
+def basic_handler_fn(infrastructure: dict) -> str:
+ return infrastructure.get("BasicHandler", "")
+
+
+@pytest.fixture
+def basic_handler_fn_arn(infrastructure: dict) -> str:
+ return infrastructure.get("BasicHandlerArn", "")
+
+
+@pytest.fixture
+def kms_key1_arn(infrastructure: dict) -> str:
+ return infrastructure.get("KMSKey1Arn", "")
+
+
+@pytest.fixture
+def kms_key2_arn(infrastructure: dict) -> str:
+ return infrastructure.get("KMSKey2Arn", "")
+
+
+@pytest.fixture
+def data_masker(kms_key1_arn) -> DataMasking:
+ return DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key1_arn]))
+
+
+@pytest.mark.xdist_group(name="data_masking")
+def test_encryption(data_masker):
+ # GIVEN an instantiation of DataMasking with the AWS encryption provider
+
+ # AWS Encryption SDK encrypt method only takes in bytes or strings
+ value = [1, 2, "string", 4.5]
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(value)
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data == value
+
+
+@pytest.mark.xdist_group(name="data_masking")
+def test_encryption_context(data_masker):
+ # GIVEN an instantiation of DataMasking with the AWS encryption provider
+
+ value = [1, 2, "string", 4.5]
+ context = {"this": "is_secure"}
+
+ # WHEN encrypting and then decrypting the encrypted data with an encryption_context
+ encrypted_data = data_masker.encrypt(value, encryption_context=context)
+ decrypted_data = data_masker.decrypt(encrypted_data, encryption_context=context)
+
+ # THEN the result is the original input data
+ assert decrypted_data == value
+
+
+@pytest.mark.xdist_group(name="data_masking")
+def test_encryption_context_mismatch(data_masker):
+ # GIVEN an instantiation of DataMasking with the AWS encryption provider
+
+ value = [1, 2, "string", 4.5]
+
+ # WHEN encrypting with a encryption_context
+ encrypted_data = data_masker.encrypt(value, encryption_context={"this": "is_secure"})
+
+ # THEN decrypting with a different encryption_context should raise a ContextMismatchError
+ with pytest.raises(ContextMismatchError):
+ data_masker.decrypt(encrypted_data, encryption_context={"not": "same_context"})
+
+
+@pytest.mark.xdist_group(name="data_masking")
+def test_encryption_no_context_fail(data_masker):
+ # GIVEN an instantiation of DataMasking with the AWS encryption provider
+
+ value = [1, 2, "string", 4.5]
+
+ # WHEN encrypting with no encryption_context
+ encrypted_data = data_masker.encrypt(value)
+
+ # THEN decrypting with an encryption_context should raise a ContextMismatchError
+ with pytest.raises(ContextMismatchError):
+ data_masker.decrypt(encrypted_data, encryption_context={"this": "is_secure"})
+
+
+@pytest.mark.xdist_group(name="data_masking")
+def test_encryption_decryption_key_mismatch(data_masker, kms_key2_arn):
+ # GIVEN an instantiation of DataMasking with the AWS encryption provider with a certain key
+
+ # WHEN encrypting and then decrypting the encrypted data
+ value = [1, 2, "string", 4.5]
+ encrypted_data = data_masker.encrypt(value)
+
+ # THEN when decrypting with a different key it should fail
+ data_masker_key2 = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key2_arn]))
+
+ with pytest.raises(DecryptKeyError):
+ data_masker_key2.decrypt(encrypted_data)
+
+
+@pytest.mark.xdist_group(name="data_masking")
+def test_encryption_in_logs(data_masker, basic_handler_fn, basic_handler_fn_arn, kms_key1_arn):
+ # GIVEN an instantiation of DataMasking with the AWS encryption provider
+
+ # WHEN encrypting a value and logging it
+ value = [1, 2, "string", 4.5]
+ encrypted_data = data_masker.encrypt(value)
+ message = encrypted_data
+ custom_key = "order_id"
+ additional_keys = {custom_key: f"{uuid4()}"}
+ payload = json.dumps({"message": message, "kms_key": kms_key1_arn, "append_keys": additional_keys})
+
+ _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload)
+ data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload)
+
+ logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time, minimum_log_entries=2)
+
+ # THEN decrypting it from the logs should show the original value
+ for log in logs.get_log(key=custom_key):
+ encrypted_data = log.message
+ decrypted_data = data_masker.decrypt(encrypted_data)
+ assert decrypted_data == value
+
+
+@pytest.mark.xdist_group(name="data_masking")
+def test_encryption_in_handler(data_masker, basic_handler_fn_arn, kms_key1_arn):
+ # GIVEN a lambda_handler with an instantiation the AWS encryption provider data masker
+
+ payload = {"kms_key": kms_key1_arn}
+
+ # WHEN the handler is invoked to encrypt data
+ handler_result, _ = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=json.dumps(payload))
+
+ response = json.loads(handler_result["Payload"].read())
+ encrypted_data = response["encrypted_data"]
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN decrypting the encrypted data from the response should result in the original value
+ assert decrypted_data == [1, 2, "string", 4.5]
diff --git a/tests/e2e/logger/handlers/tz_handler.py b/tests/e2e/logger/handlers/tz_handler.py
new file mode 100644
index 00000000000..06f6cfbf846
--- /dev/null
+++ b/tests/e2e/logger/handlers/tz_handler.py
@@ -0,0 +1,27 @@
+import os
+import time
+
+from aws_lambda_powertools import Logger
+
+
+def lambda_handler(event, context):
+ utc, datefmt, tz = event.get("utc", False), event.get("datefmt", None), event.get("tz", None)
+ if tz:
+ # set TZ environment variable to given tz param
+ os.environ["TZ"] = tz
+ time.tzset()
+
+ # init logger using given params
+ logger = Logger(service=f"{utc}-{datefmt}-{tz}", utc=utc, datefmt=datefmt)
+
+ # return the converter this Logger is using
+ if logger.handlers[0].formatter.converter == time.localtime:
+ loggerType = "localtime_converter"
+ elif logger.handlers[0].formatter.converter == time.gmtime:
+ loggerType = "gmtime_converter"
+ else:
+ loggerType = "unknown"
+
+ # print the message to CloudWatch, timestamp included
+ logger.info(loggerType)
+ return "success"
diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py
index 80379125d11..3aa2433b696 100644
--- a/tests/e2e/logger/test_logger.py
+++ b/tests/e2e/logger/test_logger.py
@@ -1,4 +1,6 @@
import json
+import os
+import time
from uuid import uuid4
import pytest
@@ -17,6 +19,16 @@ def basic_handler_fn_arn(infrastructure: dict) -> str:
return infrastructure.get("BasicHandlerArn", "")
+@pytest.fixture
+def tz_handler_fn(infrastructure: dict) -> str:
+ return infrastructure.get("TzHandler", "")
+
+
+@pytest.fixture
+def tz_handler_fn_arn(infrastructure: dict) -> str:
+ return infrastructure.get("TzHandlerArn", "")
+
+
@pytest.mark.xdist_group(name="logger")
def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn):
# GIVEN
@@ -36,3 +48,65 @@ def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn):
assert len(logs.get_cold_start_log()) == 1
assert len(logs.get_log(key=custom_key)) == 2
assert logs.have_keys(*LOGGER_LAMBDA_CONTEXT_KEYS) is True
+
+
+@pytest.mark.xdist_group(name="logger")
+@pytest.mark.parametrize("tz", ["US/Eastern", "UTC", "Asia/Shanghai"])
+@pytest.mark.parametrize("datefmt", ["%z", None])
+def test_lambda_tz_with_utc(tz_handler_fn, tz_handler_fn_arn, tz, datefmt):
+ # GIVEN: UTC is set to True, indicating that the Lambda function must use UTC.
+ utc = True
+ payload = json.dumps({"utc": utc, "tz": tz, "datefmt": datefmt})
+
+ # WHEN invoking sample handler using combination of timezone and date format
+ _, execution_time = data_fetcher.get_lambda_response(lambda_arn=tz_handler_fn_arn, payload=payload)
+ data_fetcher.get_lambda_response(lambda_arn=tz_handler_fn_arn, payload=payload)
+
+ logs = data_fetcher.get_logs(
+ function_name=tz_handler_fn,
+ start_time=execution_time,
+ minimum_log_entries=1,
+ filter_expression='{ $.service = "' + f"{utc}-{datefmt}-{tz}" + '" }',
+ )
+ result_list = logs.logs
+
+ assert len(result_list) > 0
+ result = result_list[0]
+
+ # THEN Make sure that the result list of logs is not empty, indicating that logs were collected
+ # THEN Make sure that the message in the first log entry indicates the use of "gmtime_converter"
+ assert result.message == "gmtime_converter"
+ assert result.timestamp[-5:] == "+0000"
+
+
+@pytest.mark.xdist_group(name="logger")
+@pytest.mark.parametrize("tz", ["US/Eastern", "UTC", "Asia/Shanghai"])
+@pytest.mark.parametrize("datefmt", ["%z", None])
+def test_lambda_tz_without_utc(tz_handler_fn, tz_handler_fn_arn, tz, datefmt):
+ # GIVEN: UTC is set to False, indicating that the Lambda function should not use UTC.
+ utc = False
+ payload = json.dumps({"utc": utc, "tz": tz, "datefmt": datefmt})
+
+ # WHEN invoking sample handler using combination of timezone and date format
+ _, execution_time = data_fetcher.get_lambda_response(lambda_arn=tz_handler_fn_arn, payload=payload)
+ data_fetcher.get_lambda_response(lambda_arn=tz_handler_fn_arn, payload=payload)
+
+ logs = data_fetcher.get_logs(
+ function_name=tz_handler_fn,
+ start_time=execution_time,
+ minimum_log_entries=1,
+ filter_expression='{ $.service = "' + f"{utc}-{datefmt}-{tz}" + '" }',
+ )
+ result_list = logs.logs
+
+ # THEN Make sure that the result list of logs is not empty, indicating that logs were collected
+ # THEN Make sure that the message in the first log entry indicates the use of "localtime_converter"
+ # THEN Make sure that the timestamp in the first log entry matches the current time in the specified timezone
+ assert len(result_list) > 0
+ result = result_list[0]
+
+ assert result.message == "localtime_converter"
+
+ os.environ["TZ"] = tz
+ time.tzset()
+ assert result.timestamp[-5:] == time.strftime("%z", time.localtime())
diff --git a/tests/e2e/utils/lambda_layer/powertools_layer.py b/tests/e2e/utils/lambda_layer/powertools_layer.py
index 70870af200e..05147048676 100644
--- a/tests/e2e/utils/lambda_layer/powertools_layer.py
+++ b/tests/e2e/utils/lambda_layer/powertools_layer.py
@@ -1,6 +1,6 @@
-import logging
import subprocess
from pathlib import Path
+from typing import List
from aws_cdk.aws_lambda import Architecture
from checksumdir import dirhash
@@ -9,18 +9,20 @@
from tests.e2e.utils.constants import CDK_OUT_PATH, SOURCE_CODE_ROOT_PATH
from tests.e2e.utils.lambda_layer.base import BaseLocalLambdaLayer
-logger = logging.getLogger(__name__)
-
class LocalLambdaPowertoolsLayer(BaseLocalLambdaLayer):
IGNORE_EXTENSIONS = ["pyc"]
+ ARCHITECTURE_PLATFORM_MAPPING = {
+ Architecture.X86_64.name: ("manylinux_2_17_x86_64", "manylinux_2_28_x86_64"),
+ Architecture.ARM_64.name: ("manylinux_2_17_aarch64", "manylinux_2_28_aarch64"),
+ }
def __init__(self, output_dir: Path = CDK_OUT_PATH, architecture: Architecture = Architecture.X86_64):
super().__init__(output_dir)
self.package = f"{SOURCE_CODE_ROOT_PATH}[all]"
- platform_name = self._resolve_platform(architecture)
- self.build_args = f"--platform {platform_name} --only-binary=:all: --upgrade"
+ self.platform_args = self._resolve_platform(architecture)
+ self.build_args = f"{self.platform_args} --only-binary=:all: --upgrade"
self.build_command = f"python -m pip install {self.package} {self.build_args} --target {self.target_dir}"
self.cleanup_command = (
f"rm -rf {self.target_dir}/boto* {self.target_dir}/s3transfer* && "
@@ -62,16 +64,20 @@ def _has_source_changed(self) -> bool:
return False
def _resolve_platform(self, architecture: Architecture) -> str:
- """Returns the correct plaform name for the manylinux project (see PEP 599)
+ """Returns the correct pip platform tag argument for the manylinux project (see PEP 599)
Returns
-------
- platform_name : str
- The platform tag
+ str
+ pip's platform argument, e.g., --platform manylinux_2_17_x86_64 --platform manylinux_2_28_x86_64
"""
- if architecture.name == Architecture.X86_64.name:
- return "manylinux1_x86_64"
- elif architecture.name == Architecture.ARM_64.name:
- return "manylinux2014_aarch64"
- else:
- raise ValueError(f"unknown architecture {architecture.name}")
+ platforms = self.ARCHITECTURE_PLATFORM_MAPPING.get(architecture.name)
+ if not platforms:
+ raise ValueError(
+ f"unknown architecture {architecture.name}. Supported: {self.ARCHITECTURE_PLATFORM_MAPPING.keys()}",
+ )
+
+ return self._build_platform_args(platforms)
+
+ def _build_platform_args(self, platforms: List[str]):
+ return " ".join([f"--platform {platform}" for platform in platforms])
diff --git a/tests/events/vpcLatticeEventV2PathTrailingSlash.json b/tests/events/vpcLatticeEventV2PathTrailingSlash.json
new file mode 100644
index 00000000000..5f5fa7edd72
--- /dev/null
+++ b/tests/events/vpcLatticeEventV2PathTrailingSlash.json
@@ -0,0 +1,30 @@
+{
+ "version": "2.0",
+ "path": "/newpath/",
+ "method": "GET",
+ "headers": {
+ "user_agent": "curl/7.64.1",
+ "x-forwarded-for": "10.213.229.10",
+ "host": "test-lambda-service-3908sdf9u3u.dkfjd93.vpc-lattice-svcs.us-east-2.on.aws",
+ "accept": "*/*"
+ },
+ "query_string_parameters": {
+ "order-id": "1"
+ },
+ "body": "{\"message\": \"Hello from Lambda!\"}",
+ "isBase64Encoded": false,
+ "requestContext": {
+ "serviceNetworkArn": "arn:aws:vpc-lattice:us-east-2:123456789012:servicenetwork/sn-0bf3f2882e9cc805a",
+ "serviceArn": "arn:aws:vpc-lattice:us-east-2:123456789012:service/svc-0a40eebed65f8d69c",
+ "targetGroupArn": "arn:aws:vpc-lattice:us-east-2:123456789012:targetgroup/tg-6d0ecf831eec9f09",
+ "identity": {
+ "sourceVpcArn": "arn:aws:ec2:region:123456789012:vpc/vpc-0b8276c84697e7339",
+ "type" : "AWS_IAM",
+ "principal": "arn:aws:sts::123456789012:assumed-role/example-role/057d00f8b51257ba3c853a0f248943cf",
+ "sessionName": "057d00f8b51257ba3c853a0f248943cf",
+ "x509SanDns": "example.com"
+ },
+ "region": "us-east-2",
+ "timeEpoch": "1696331543569073"
+ }
+}
diff --git a/tests/events/vpcLatticeV2Event.json b/tests/events/vpcLatticeV2Event.json
new file mode 100644
index 00000000000..fe10d83a3af
--- /dev/null
+++ b/tests/events/vpcLatticeV2Event.json
@@ -0,0 +1,30 @@
+{
+ "version": "2.0",
+ "path": "/newpath",
+ "method": "GET",
+ "headers": {
+ "user_agent": "curl/7.64.1",
+ "x-forwarded-for": "10.213.229.10",
+ "host": "test-lambda-service-3908sdf9u3u.dkfjd93.vpc-lattice-svcs.us-east-2.on.aws",
+ "accept": "*/*"
+ },
+ "queryStringParameters": {
+ "order-id": "1"
+ },
+ "body": "{\"message\": \"Hello from Lambda!\"}",
+ "isBase64Encoded": false,
+ "requestContext": {
+ "serviceNetworkArn": "arn:aws:vpc-lattice:us-east-2:123456789012:servicenetwork/sn-0bf3f2882e9cc805a",
+ "serviceArn": "arn:aws:vpc-lattice:us-east-2:123456789012:service/svc-0a40eebed65f8d69c",
+ "targetGroupArn": "arn:aws:vpc-lattice:us-east-2:123456789012:targetgroup/tg-6d0ecf831eec9f09",
+ "identity": {
+ "sourceVpcArn": "arn:aws:ec2:region:123456789012:vpc/vpc-0b8276c84697e7339",
+ "type" : "AWS_IAM",
+ "principal": "arn:aws:sts::123456789012:assumed-role/example-role/057d00f8b51257ba3c853a0f248943cf",
+ "sessionName": "057d00f8b51257ba3c853a0f248943cf",
+ "x509SanDns": "example.com"
+ },
+ "region": "us-east-2",
+ "timeEpoch": "1696331543569073"
+ }
+}
diff --git a/tests/functional/data_masking/__init__.py b/tests/functional/data_masking/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/functional/data_masking/conftest.py b/tests/functional/data_masking/conftest.py
new file mode 100644
index 00000000000..f73ccca4113
--- /dev/null
+++ b/tests/functional/data_masking/conftest.py
@@ -0,0 +1,6 @@
+from pytest_socket import disable_socket
+
+
+def pytest_runtest_setup():
+ """Disable Unix and TCP sockets for Data masking tests"""
+ disable_socket()
diff --git a/tests/functional/data_masking/test_aws_encryption_sdk.py b/tests/functional/data_masking/test_aws_encryption_sdk.py
new file mode 100644
index 00000000000..978c2e21572
--- /dev/null
+++ b/tests/functional/data_masking/test_aws_encryption_sdk.py
@@ -0,0 +1,283 @@
+from __future__ import annotations
+
+import base64
+import json
+from typing import Any, Callable, Dict, Union
+
+import pytest
+
+from aws_lambda_powertools.utilities._data_masking import DataMasking
+from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING
+from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider
+from aws_lambda_powertools.utilities._data_masking.provider.kms import (
+ AwsEncryptionSdkProvider,
+)
+
+
+class FakeEncryptionKeyProvider(BaseProvider):
+ def __init__(
+ self,
+ json_serializer: Callable[[Dict], str] | None = None,
+ json_deserializer: Callable[[Union[Dict, str, bool, int, float]], str] | None = None,
+ ):
+ super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer)
+
+ def encrypt(self, data: bytes | str, **kwargs) -> str:
+ data = self.json_serializer(data)
+ ciphertext = base64.b64encode(data).decode()
+ return ciphertext
+
+ def decrypt(self, data: bytes, **kwargs) -> Any:
+ ciphertext_decoded = base64.b64decode(data)
+ ciphertext = self.json_deserializer(ciphertext_decoded)
+ return ciphertext
+
+
+@pytest.fixture
+def data_masker(monkeypatch) -> DataMasking:
+ """DataMasking using AWS Encryption SDK Provider with a fake client"""
+ fake_key_provider = FakeEncryptionKeyProvider()
+ provider = AwsEncryptionSdkProvider(
+ keys=["dummy"],
+ key_provider=fake_key_provider,
+ )
+ return DataMasking(provider=provider)
+
+
+def test_mask_int(data_masker):
+ # GIVEN an int data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(42)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_float(data_masker):
+ # GIVEN a float data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(4.2)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_bool(data_masker):
+ # GIVEN a bool data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(True)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_none(data_masker):
+ # GIVEN a None data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(None)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_str(data_masker):
+ # GIVEN a str data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask("this is a string")
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_list(data_masker):
+ # GIVEN a list data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask([1, 2, "string", 3])
+
+ # THEN the result is the data masked, while maintaining type list
+ assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING]
+
+
+def test_mask_dict(data_masker):
+ # GIVEN a dict data type
+ data = {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(data)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_dict_with_fields(data_masker):
+ # GIVEN a dict data type
+ data = {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN mask is called with a list of fields specified
+ masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"])
+
+ # THEN the result is only the specified fields are masked
+ assert masked_string == {
+ "a": {
+ "1": {"None": DATA_MASKING_STRING, "four": "world"},
+ "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}},
+ },
+ }
+
+
+def test_mask_json_dict_with_fields(data_masker):
+ # GIVEN the data type is a json representation of a dictionary
+ data = json.dumps(
+ {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ },
+ )
+
+ # WHEN mask is called with a list of fields specified
+ masked_json_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"])
+
+ # THEN the result is only the specified fields are masked
+ assert masked_json_string == {
+ "a": {
+ "1": {"None": DATA_MASKING_STRING, "four": "world"},
+ "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}},
+ },
+ }
+
+
+def test_encrypt_int(data_masker):
+ # GIVEN an int data type
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(-1)
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data == -1
+
+
+def test_encrypt_float(data_masker):
+ # GIVEN a float data type
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(-1.11)
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data == -1.11
+
+
+def test_encrypt_bool(data_masker):
+ # GIVEN a bool data type
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(True)
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data is True
+
+
+def test_encrypt_none(data_masker):
+ # GIVEN a None data type
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(None)
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data is None
+
+
+def test_encrypt_str(data_masker):
+ # GIVEN a str data type
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt("this is a string")
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data == "this is a string"
+
+
+def test_encrypt_list(data_masker):
+ # GIVEN a list data type
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt([1, 2, "a string", 3.4])
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data == [1, 2, "a string", 3.4]
+
+
+def test_encrypt_dict(data_masker):
+ # GIVEN a dict data type
+ data = {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(data)
+ decrypted_data = data_masker.decrypt(encrypted_data)
+
+ # THEN the result is the original input data
+ assert decrypted_data == data
+
+
+def test_encrypt_dict_with_fields(data_masker):
+ # GIVEN the data type is a dictionary
+ data = {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"])
+ decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"])
+
+ # THEN the decrypted data matches the original input data
+ assert decrypted_data == data
+
+
+def test_encrypt_json_dict_with_fields(data_masker):
+ # GIVEN the data type is a json representation of a dictionary
+ data = json.dumps(
+ {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ },
+ )
+
+ # WHEN encrypting and then decrypting the encrypted data
+ encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"])
+ decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"])
+
+ # THEN the decrypted data matches the original input data
+ assert decrypted_data == json.loads(data)
diff --git a/tests/functional/event_handler/test_vpc_latticev2.py b/tests/functional/event_handler/test_vpc_latticev2.py
new file mode 100644
index 00000000000..e249b7d2ba1
--- /dev/null
+++ b/tests/functional/event_handler/test_vpc_latticev2.py
@@ -0,0 +1,77 @@
+from aws_lambda_powertools.event_handler import (
+ Response,
+ VPCLatticeV2Resolver,
+ content_types,
+)
+from aws_lambda_powertools.event_handler.api_gateway import CORSConfig
+from aws_lambda_powertools.utilities.data_classes import VPCLatticeEventV2
+from tests.functional.utils import load_event
+
+
+def test_vpclatticev2_event():
+ # GIVEN a VPC Lattice event
+ app = VPCLatticeV2Resolver()
+
+ @app.get("/newpath")
+ def foo():
+ assert isinstance(app.current_event, VPCLatticeEventV2)
+ assert app.lambda_context == {}
+ return Response(200, content_types.TEXT_HTML, "foo")
+
+ # WHEN calling the event handler
+ result = app(load_event("vpcLatticeV2Event.json"), {})
+
+ # THEN process event correctly
+ # AND set the current_event type as VPCLatticeEventV2
+ assert result["statusCode"] == 200
+ assert result["headers"]["Content-Type"] == content_types.TEXT_HTML
+ assert result["body"] == "foo"
+
+
+def test_vpclatticev2_event_path_trailing_slash(json_dump):
+ # GIVEN a VPC Lattice event
+ app = VPCLatticeV2Resolver()
+
+ @app.get("/newpath")
+ def foo():
+ assert isinstance(app.current_event, VPCLatticeEventV2)
+ assert app.lambda_context == {}
+ return Response(200, content_types.TEXT_HTML, "foo")
+
+ # WHEN calling the event handler using path with trailing "/"
+ result = app(load_event("vpcLatticeEventV2PathTrailingSlash.json"), {})
+
+ # THEN
+ assert result["statusCode"] == 404
+ assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON
+ expected = {"statusCode": 404, "message": "Not found"}
+ assert result["body"] == json_dump(expected)
+
+
+def test_cors_preflight_body_is_empty_not_null():
+ # GIVEN CORS is configured
+ app = VPCLatticeV2Resolver(cors=CORSConfig())
+
+ event = {"path": "/my/request", "method": "OPTIONS", "headers": {}}
+
+ # WHEN calling the event handler
+ result = app(event, {})
+
+ # THEN the body should be an empty string
+ assert result["body"] == ""
+
+
+def test_vpclatticev2_url_no_matches():
+ # GIVEN a VPC Lattice event
+ app = VPCLatticeV2Resolver()
+
+ @app.post("/no_match")
+ def foo():
+ raise RuntimeError()
+
+ # WHEN calling the event handler
+ result = app(load_event("vpcLatticeV2Event.json"), {})
+
+ # THEN process event correctly
+ # AND return 404 because the event doesn't match any known route
+ assert result["statusCode"] == 404
diff --git a/tests/functional/parser/test_parser.py b/tests/functional/parser/test_parser.py
index 1f948655917..f265de14590 100644
--- a/tests/functional/parser/test_parser.py
+++ b/tests/functional/parser/test_parser.py
@@ -93,3 +93,28 @@ def handle_no_envelope(event: Union[Dict, str], _: LambdaContext):
return event
handle_no_envelope(dummy_event, LambdaContext())
+
+
+def test_parser_event_with_type_hint(dummy_event, dummy_schema):
+ @event_parser
+ def handler(event: dummy_schema, _: LambdaContext):
+ assert event.message == "hello world"
+
+ handler(dummy_event["payload"], LambdaContext())
+
+
+def test_parser_event_without_type_hint(dummy_event, dummy_schema):
+ @event_parser
+ def handler(event, _):
+ assert event.message == "hello world"
+
+ with pytest.raises(exceptions.InvalidModelTypeError):
+ handler(dummy_event["payload"], LambdaContext())
+
+
+def test_parser_event_with_type_hint_and_non_default_argument(dummy_event, dummy_schema):
+ @event_parser
+ def handler(evt: dummy_schema, _: LambdaContext):
+ assert evt.message == "hello world"
+
+ handler(dummy_event["payload"], LambdaContext())
diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py
index de589b9accc..47803b1e52d 100644
--- a/tests/functional/test_logger.py
+++ b/tests/functional/test_logger.py
@@ -959,3 +959,37 @@ def test_stream_defaults_to_stdout(service_name, capsys):
# NOTE: we can't assert on capsys.readouterr().err due to a known bug: https://github.com/pytest-dev/pytest/issues/5997
log = json.loads(capsys.readouterr().out.strip())
assert log["message"] == msg
+
+
+def test_logger_logs_stack_trace_with_default_value(service_name, stdout):
+ # GIVEN a Logger instance with serialize_stacktrace default value = True
+ logger = Logger(service=service_name, stream=stdout)
+
+ # WHEN invoking a Lambda
+ def handler(event, context):
+ try:
+ raise ValueError("something went wrong")
+ except Exception:
+ logger.exception("Received an exception")
+
+ # THEN we expect a "stack_trace" in log
+ handler({}, lambda_context)
+ log = capture_logging_output(stdout)
+ assert "stack_trace" in log
+
+
+def test_logger_logs_stack_trace_with_non_default_value(service_name, stdout):
+ # GIVEN a Logger instance with serialize_stacktrace = False
+ logger = Logger(service=service_name, stream=stdout, serialize_stacktrace=False)
+
+ # WHEN invoking a Lambda
+ def handler(event, context):
+ try:
+ raise ValueError("something went wrong")
+ except Exception:
+ logger.exception("Received an exception")
+
+ # THEN we expect a "stack_trace" not in log
+ handler({}, lambda_context)
+ log = capture_logging_output(stdout)
+ assert "stack_trace" not in log
diff --git a/tests/functional/test_logger_powertools_formatter.py b/tests/functional/test_logger_powertools_formatter.py
index 61c3f76efd4..62f3f36c025 100644
--- a/tests/functional/test_logger_powertools_formatter.py
+++ b/tests/functional/test_logger_powertools_formatter.py
@@ -6,13 +6,27 @@
import re
import string
import time
+from collections import namedtuple
import pytest
from aws_lambda_powertools import Logger
+from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter
from aws_lambda_powertools.logging.formatters.datadog import DatadogLogFormatter
+@pytest.fixture
+def lambda_context():
+ lambda_context = {
+ "function_name": "test",
+ "memory_limit_in_mb": 128,
+ "invoked_function_arn": "arn:aws:lambda:eu-west-1:809313241:function:test",
+ "aws_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72",
+ }
+
+ return namedtuple("LambdaContext", lambda_context.keys())(*lambda_context.values())
+
+
@pytest.fixture
def stdout():
return io.StringIO()
@@ -291,7 +305,15 @@ def test_log_in_utc(service_name):
logger = Logger(service=service_name, utc=True)
# THEN logging formatter time converter should use gmtime fn
- assert logger._logger.handlers[0].formatter.converter == time.gmtime
+ assert logger.handlers[0].formatter.converter == time.gmtime
+
+
+def test_log_with_localtime(service_name):
+ # GIVEN a logger where UTC is false
+ logger = Logger(service=service_name, utc=False)
+
+ # THEN logging formatter time converter should use localtime fn
+ assert logger.handlers[0].formatter.converter == time.localtime
@pytest.mark.parametrize("message", ["hello", 1.10, {}, [], True, object()])
@@ -350,3 +372,42 @@ def test_datadog_formatter_use_rfc3339_date(stdout, service_name):
log = capture_logging_output(stdout)
assert re.fullmatch(RFC3339_REGEX, log["timestamp"]) # "2022-10-27T17:42:26.841+0200"
+
+
+def test_logger_logs_stack_trace_with_formatter_default_value(service_name, stdout):
+ # GIVEN a Logger instance with LambdaPowertoolsFormatter set explicitly
+ # GIVEN serialize_stacktrace default value = True
+ logger = Logger(service=service_name, stream=stdout, logger_formatter=LambdaPowertoolsFormatter())
+
+ # WHEN invoking a Lambda
+ def handler(event, context):
+ try:
+ raise ValueError("something went wrong")
+ except Exception:
+ logger.exception("Received an exception")
+
+ # THEN we expect a "stack_trace" in log
+ handler({}, lambda_context)
+ log = capture_logging_output(stdout)
+ assert "stack_trace" in log
+
+
+def test_logger_logs_stack_trace_with_formatter_non_default_value(service_name, stdout):
+ # GIVEN a Logger instance with serialize_stacktrace = False
+ logger = Logger(
+ service=service_name,
+ stream=stdout,
+ logger_formatter=LambdaPowertoolsFormatter(serialize_stacktrace=False),
+ )
+
+ # WHEN invoking a Lambda
+ def handler(event, context):
+ try:
+ raise ValueError("something went wrong")
+ except Exception:
+ logger.exception("Received an exception")
+
+ # THEN we expect a "stack_trace" not in log
+ handler({}, lambda_context)
+ log = capture_logging_output(stdout)
+ assert "stack_trace" not in log
diff --git a/tests/performance/data_masking/data_masking_load_test.yaml b/tests/performance/data_masking/data_masking_load_test.yaml
new file mode 100644
index 00000000000..5f696d57114
--- /dev/null
+++ b/tests/performance/data_masking/data_masking_load_test.yaml
@@ -0,0 +1,32 @@
+config:
+ target: https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function128
+ phases:
+ - duration: 60
+ arrivalRate: 1
+ rampTo: 5
+ name: Warm up phase
+ - duration: 60
+ arrivalRate: 5
+ rampTo: 10
+ name: Ramp up load
+ - duration: 30
+ arrivalRate: 10
+ rampTo: 30
+ name: Spike phase
+ # Load & configure a couple of useful plugins
+ # https://docs.art/reference/extensions
+ plugins:
+ apdex: {}
+ metrics-by-endpoint: {}
+ apdex:
+ threshold: 500
+scenarios:
+ - flow:
+ - loop:
+ - get:
+ url: "https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function128"
+ - get:
+ url: "https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function1024"
+ - get:
+ url: "https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function1769"
+ count: 100
\ No newline at end of file
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/.gitignore b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/.gitignore
new file mode 100644
index 00000000000..4c7a643c028
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/.gitignore
@@ -0,0 +1,243 @@
+# Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
+
+### Linux ###
+*~
+
+# temporary files which can be created if a process still has a handle open of a deleted file
+.fuse_hidden*
+
+# KDE directory preferences
+.directory
+
+# Linux trash folder which might appear on any partition or disk
+.Trash-*
+
+# .nfs files are created when an open file is removed but is still being accessed
+.nfs*
+
+### OSX ###
+*.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### PyCharm ###
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff:
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/dictionaries
+
+# Sensitive or high-churn files:
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.xml
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+
+# Gradle:
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# CMake
+cmake-build-debug/
+
+# Mongo Explorer plugin:
+.idea/**/mongoSettings.xml
+
+## File-based project format:
+*.iws
+
+## Plugin-specific files:
+
+# IntelliJ
+/out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Ruby plugin and RubyMine
+/.rakeTasks
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+### PyCharm Patch ###
+# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
+
+# *.iml
+# modules.xml
+# .idea/misc.xml
+# *.ipr
+
+# Sonarlint plugin
+.idea/sonarlint
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+.pytest_cache/
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule.*
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+### VisualStudioCode ###
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+.history
+
+### Windows ###
+# Windows thumbnail cache files
+Thumbs.db
+ehthumbs.db
+ehthumbs_vista.db
+
+# Folder config file
+Desktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# Build folder
+
+*/build/*
+
+# End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/README.md b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/README.md
new file mode 100644
index 00000000000..aed9d43976d
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/README.md
@@ -0,0 +1,163 @@
+# pt-load-test-stack
+
+Congratulations, you have just created a Serverless "Hello World" application using the AWS Serverless Application Model (AWS SAM) for the `python3.10` runtime, and options to bootstrap it with [**Powertools for AWS Lambda (Python)**](https://awslabs.github.io/aws-lambda-powertools-python/latest/) (Powertools for AWS Lambda (Python)) utilities for Logging, Tracing and Metrics.
+
+Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity.
+
+## Powertools for AWS Lambda (Python) features
+
+Powertools for AWS Lambda (Python) provides three core utilities:
+
+* **[Tracing](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/tracer/)** - Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions
+* **[Logging](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/logger/)** - Structured logging made easier, and decorator to enrich structured logging with key Lambda context details
+* **[Metrics](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/metrics/)** - Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF)
+
+Find the complete project's [documentation here](https://awslabs.github.io/aws-lambda-powertools-python).
+
+### Installing Powertools for AWS Lambda (Python)
+
+With [pip](https://pip.pypa.io/en/latest/index.html) installed, run:
+
+```bash
+pip install aws-lambda-powertools
+```
+
+### Powertools for AWS Lambda (Python) Examples
+
+* [Tutorial](https://awslabs.github.io/aws-lambda-powertools-python/latest/tutorial)
+* [Serverless Shopping cart](https://github.com/aws-samples/aws-serverless-shopping-cart)
+* [Serverless Airline](https://github.com/aws-samples/aws-serverless-airline-booking)
+* [Serverless E-commerce platform](https://github.com/aws-samples/aws-serverless-ecommerce-platform)
+* [Serverless GraphQL Nanny Booking Api](https://github.com/trey-rosius/babysitter_api)
+
+## Working with this project
+
+This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders.
+
+* hello_world - Code for the application's Lambda function.
+* events - Invocation events that you can use to invoke the function.
+* tests - Unit tests for the application code.
+* template.yaml - A template that defines the application's AWS resources.
+
+The application uses several AWS resources, including Lambda functions and an API Gateway API. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code.
+
+If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit.
+The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started.
+
+* [CLion](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [GoLand](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [WebStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [Rider](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [PhpStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [RubyMine](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [DataGrip](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
+* [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html)
+* [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html)
+
+### Deploy the sample application
+
+The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API.
+
+To use the SAM CLI, you need the following tools.
+
+* SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
+* [Python 3 installed](https://www.python.org/downloads/)
+* Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community)
+
+To build and deploy your application for the first time, run the following in your shell:
+
+```bash
+sam build --use-container
+sam deploy --guided
+```
+
+The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts:
+
+* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name.
+* **AWS Region**: The AWS region you want to deploy your app to.
+* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes.
+* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
+* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
+
+You can find your API Gateway Endpoint URL in the output values displayed after deployment.
+
+### Use the SAM CLI to build and test locally
+
+Build your application with the `sam build --use-container` command.
+
+```bash
+pt-load-test-stack$ sam build --use-container
+```
+
+The SAM CLI installs dependencies defined in `hello_world/requirements.txt`, creates a deployment package, and saves it in the `.aws-sam/build` folder.
+
+Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project.
+
+Run functions locally and invoke them with the `sam local invoke` command.
+
+```bash
+pt-load-test-stack$ sam local invoke HelloWorldFunction --event events/event.json
+```
+
+The SAM CLI can also emulate your application's API. Use the `sam local start-api` to run the API locally on port 3000.
+
+```bash
+pt-load-test-stack$ sam local start-api
+pt-load-test-stack$ curl http://localhost:3000/
+```
+
+The SAM CLI reads the application template to determine the API's routes and the functions that they invoke. The `Events` property on each function's definition includes the route and method for each path.
+
+```yaml
+ Events:
+ HelloWorld:
+ Type: Api
+ Properties:
+ Path: /hello
+ Method: get
+```
+
+### Add a resource to your application
+
+The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types.
+
+### Fetch, tail, and filter Lambda function logs
+
+To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug.
+
+`NOTE`: This command works for all AWS Lambda functions, not just the ones you deploy using SAM.
+
+```bash
+pt-load-test-stack$ sam logs -n HelloWorldFunction --stack-name pt-load-test-stack --tail
+```
+
+You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html).
+
+### Tests
+
+Tests are defined in the `tests` folder in this project. Use PIP to install the test dependencies and run tests.
+
+```bash
+pt-load-test-stack$ pip install -r tests/requirements.txt --user
+# unit test
+pt-load-test-stack$ python -m pytest tests/unit -v
+# integration test, requiring deploying the stack first.
+# Create the env variable AWS_SAM_STACK_NAME with the name of the stack we are testing
+pt-load-test-stack$ AWS_SAM_STACK_NAME="pt-load-test-stack" python -m pytest tests/integration -v
+```
+
+### Cleanup
+
+To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following:
+
+```bash
+sam delete --stack-name "pt-load-test-stack"
+```
+
+## Resources
+
+See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts.
+
+Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/)
\ No newline at end of file
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/events/hello.json b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/events/hello.json
new file mode 100644
index 00000000000..fdb5180fe0a
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/events/hello.json
@@ -0,0 +1,111 @@
+{
+ "body":"",
+ "headers":{
+ "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
+ "Accept-Encoding":"gzip, deflate, br",
+ "Accept-Language":"pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7",
+ "Cache-Control":"max-age=0",
+ "Connection":"keep-alive",
+ "Host":"127.0.0.1:3000",
+ "Sec-Ch-Ua":"\"Google Chrome\";v=\"105\", \"Not)A;Brand\";v=\"8\", \"Chromium\";v=\"105\"",
+ "Sec-Ch-Ua-Mobile":"?0",
+ "Sec-Ch-Ua-Platform":"\"Linux\"",
+ "Sec-Fetch-Dest":"document",
+ "Sec-Fetch-Mode":"navigate",
+ "Sec-Fetch-Site":"none",
+ "Sec-Fetch-User":"?1",
+ "Upgrade-Insecure-Requests":"1",
+ "User-Agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36",
+ "X-Forwarded-Port":"3000",
+ "X-Forwarded-Proto":"http"
+ },
+ "httpMethod":"GET",
+ "isBase64Encoded": false,
+ "multiValueHeaders":{
+ "Accept":[
+ "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
+ ],
+ "Accept-Encoding":[
+ "gzip, deflate, br"
+ ],
+ "Accept-Language":[
+ "pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7"
+ ],
+ "Cache-Control":[
+ "max-age=0"
+ ],
+ "Connection":[
+ "keep-alive"
+ ],
+ "Host":[
+ "127.0.0.1:3000"
+ ],
+ "Sec-Ch-Ua":[
+ "\"Google Chrome\";v=\"105\", \"Not)A;Brand\";v=\"8\", \"Chromium\";v=\"105\""
+ ],
+ "Sec-Ch-Ua-Mobile":[
+ "?0"
+ ],
+ "Sec-Ch-Ua-Platform":[
+ "\"Linux\""
+ ],
+ "Sec-Fetch-Dest":[
+ "document"
+ ],
+ "Sec-Fetch-Mode":[
+ "navigate"
+ ],
+ "Sec-Fetch-Site":[
+ "none"
+ ],
+ "Sec-Fetch-User":[
+ "?1"
+ ],
+ "Upgrade-Insecure-Requests":[
+ "1"
+ ],
+ "User-Agent":[
+ "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36"
+ ],
+ "X-Forwarded-Port":[
+ "3000"
+ ],
+ "X-Forwarded-Proto":[
+ "http"
+ ]
+ },
+ "multiValueQueryStringParameters":"",
+ "path":"/hello",
+ "pathParameters":"",
+ "queryStringParameters":"",
+ "requestContext":{
+ "accountId":"123456789012",
+ "apiId":"1234567890",
+ "domainName":"127.0.0.1:3000",
+ "extendedRequestId":"",
+ "httpMethod":"GET",
+ "identity":{
+ "accountId":"",
+ "apiKey":"",
+ "caller":"",
+ "cognitoAuthenticationProvider":"",
+ "cognitoAuthenticationType":"",
+ "cognitoIdentityPoolId":"",
+ "sourceIp":"127.0.0.1",
+ "user":"",
+ "userAgent":"Custom User Agent String",
+ "userArn":""
+ },
+ "path":"/hello",
+ "protocol":"HTTP/1.1",
+ "requestId":"a3590457-cac2-4f10-8fc9-e47114bf7c62",
+ "requestTime":"02/Feb/2023:11:45:26 +0000",
+ "requestTimeEpoch":1675338326,
+ "resourceId":"123456",
+ "resourcePath":"/hello",
+ "stage":"Prod"
+ },
+ "resource":"/hello",
+ "stageVariables":"",
+ "version":"1.0"
+ }
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py
new file mode 100644
index 00000000000..9a898ea10cd
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py
@@ -0,0 +1,60 @@
+import os
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities._data_masking import DataMasking
+from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+KMS_KEY_ARN = os.environ["KMS_KEY_ARN"]
+
+json_blob = {
+ "id": 1,
+ "name": "John Doe",
+ "age": 30,
+ "email": "johndoe@example.com",
+ "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"},
+ "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"],
+ "interests": ["Hiking", "Traveling", "Photography", "Reading"],
+ "job_history": {
+ "company": {
+ "company_name": "Acme Inc.",
+ "company_address": "5678 Interview Dr.",
+ },
+ "position": "Software Engineer",
+ "start_date": "2015-01-01",
+ "end_date": "2017-12-31",
+ },
+ "about_me": """
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis
+ sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus,
+ ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim.
+ Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue,
+ risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin
+ interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat
+ volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat.
+ Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus
+ malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc.
+ """,
+}
+
+app = APIGatewayRestResolver()
+tracer = Tracer()
+logger = Logger()
+
+
+@app.get("/function1024")
+@tracer.capture_method
+def function1024():
+ logger.info("Hello world function1024 - HTTP 200")
+ data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN]))
+ encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"])
+ decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"])
+ return {"Decrypted_json_blob_function_1024": decrypted}
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ return app.resolve(event, context)
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/requirements.txt b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/requirements.txt
new file mode 100644
index 00000000000..b74b60fc263
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/requirements.txt
@@ -0,0 +1,3 @@
+requests
+aws-lambda-powertools[tracer]
+aws-encryption-sdk
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py
new file mode 100644
index 00000000000..6b8250579a5
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py
@@ -0,0 +1,60 @@
+import os
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities._data_masking import DataMasking
+from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+KMS_KEY_ARN = os.environ["KMS_KEY_ARN"]
+
+json_blob = {
+ "id": 1,
+ "name": "John Doe",
+ "age": 30,
+ "email": "johndoe@example.com",
+ "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"},
+ "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"],
+ "interests": ["Hiking", "Traveling", "Photography", "Reading"],
+ "job_history": {
+ "company": {
+ "company_name": "Acme Inc.",
+ "company_address": "5678 Interview Dr.",
+ },
+ "position": "Software Engineer",
+ "start_date": "2015-01-01",
+ "end_date": "2017-12-31",
+ },
+ "about_me": """
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis
+ sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus,
+ ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim.
+ Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue,
+ risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin
+ interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat
+ volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat.
+ Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus
+ malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc.
+ """,
+}
+
+app = APIGatewayRestResolver()
+tracer = Tracer()
+logger = Logger()
+
+
+@app.get("/function128")
+@tracer.capture_method
+def function128():
+ logger.info("Hello world function128 - HTTP 200")
+ data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN]))
+ encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"])
+ decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"])
+ return {"Decrypted_json_blob_function_128": decrypted}
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ return app.resolve(event, context)
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/requirements.txt b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/requirements.txt
new file mode 100644
index 00000000000..b74b60fc263
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/requirements.txt
@@ -0,0 +1,3 @@
+requests
+aws-lambda-powertools[tracer]
+aws-encryption-sdk
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py
new file mode 100644
index 00000000000..623a1f7b232
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py
@@ -0,0 +1,60 @@
+import os
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities._data_masking import DataMasking
+from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+KMS_KEY_ARN = os.environ["KMS_KEY_ARN"]
+
+json_blob = {
+ "id": 1,
+ "name": "John Doe",
+ "age": 30,
+ "email": "johndoe@example.com",
+ "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"},
+ "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"],
+ "interests": ["Hiking", "Traveling", "Photography", "Reading"],
+ "job_history": {
+ "company": {
+ "company_name": "Acme Inc.",
+ "company_address": "5678 Interview Dr.",
+ },
+ "position": "Software Engineer",
+ "start_date": "2015-01-01",
+ "end_date": "2017-12-31",
+ },
+ "about_me": """
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis
+ sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus,
+ ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim.
+ Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue,
+ risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin
+ interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat
+ volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat.
+ Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus
+ malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc.
+ """,
+}
+
+app = APIGatewayRestResolver()
+tracer = Tracer()
+logger = Logger()
+
+
+@app.get("/function1769")
+@tracer.capture_method
+def function1769():
+ logger.info("Hello world function1769 - HTTP 200")
+ data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN]))
+ encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"])
+ decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"])
+ return {"Decrypted_json_blob_function_1769": decrypted}
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ return app.resolve(event, context)
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/requirements.txt b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/requirements.txt
new file mode 100644
index 00000000000..b74b60fc263
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/requirements.txt
@@ -0,0 +1,3 @@
+requests
+aws-lambda-powertools[tracer]
+aws-encryption-sdk
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/samconfig.toml b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/samconfig.toml
new file mode 100644
index 00000000000..82f9cdc06d9
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/samconfig.toml
@@ -0,0 +1,34 @@
+# More information about the configuration file can be found here:
+# https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html
+version = 0.1
+
+[default]
+[default.global.parameters]
+stack_name = "pt-load-test-stack"
+
+[default.build.parameters]
+cached = true
+parallel = true
+
+[default.validate.parameters]
+lint = true
+
+[default.deploy.parameters]
+capabilities = "CAPABILITY_IAM"
+confirm_changeset = true
+resolve_s3 = true
+s3_prefix = "pt-load-test-stack"
+region = "us-west-2"
+image_repositories = []
+
+[default.package.parameters]
+resolve_s3 = true
+
+[default.sync.parameters]
+watch = true
+
+[default.local_start_api.parameters]
+warm_containers = "EAGER"
+
+[default.local_start_lambda.parameters]
+warm_containers = "EAGER"
diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml
new file mode 100644
index 00000000000..f2a6540c267
--- /dev/null
+++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml
@@ -0,0 +1,147 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: >
+ pt-load-test-stack
+
+ Powertools for AWS Lambda (Python) example
+
+Globals: # https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-template-anatomy-globals.html
+ Function:
+ Timeout: 5
+ Runtime: python3.10
+
+ Tracing: Active
+ Api:
+ TracingEnabled: true
+Resources:
+ MyKMSKey:
+ Type: AWS::KMS::Key
+ Properties:
+ Enabled: true
+ KeyPolicy:
+ Version: 2012-10-17
+ Statement:
+ - Effect: Allow
+ Action: kms:*
+ Resource: "*"
+ Principal:
+ AWS: !Join [ "", [ "arn:aws:iam::", !Ref "AWS::AccountId", ":root" ] ]
+ Function128:
+ Type: AWS::Serverless::Function # More info about Function Resource: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-resource-function.html
+ Properties:
+ Handler: app.lambda_handler
+ CodeUri: function_128
+ Description: function 128 MB
+ MemorySize: 128
+ Architectures:
+ - x86_64
+ Policies:
+ Statement:
+ - Effect: Allow
+ Action: kms:*
+ Resource: !GetAtt MyKMSKey.Arn
+ Tracing: Active
+ Events:
+ HelloPath:
+ Type: Api # More info about API Event Source: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-function-api.html
+ Properties:
+ Path: /function128
+ Method: GET
+ # Powertools for AWS Lambda (Python) env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
+ Environment:
+ Variables:
+ POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld
+ POWERTOOLS_METRICS_NAMESPACE: Powertools
+ LOG_LEVEL: INFO
+ KMS_KEY_ARN: !GetAtt MyKMSKey.Arn
+ Tags:
+ LambdaPowertools: python
+ Function1024:
+ Type: AWS::Serverless::Function # More info about Function Resource: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-resource-function.html
+ Properties:
+ Handler: app.lambda_handler
+ CodeUri: function_1024
+ Description: function 1024 MB
+ MemorySize: 1024
+ Architectures:
+ - x86_64
+ Policies:
+ Statement:
+ - Effect: Allow
+ Action: kms:*
+ Resource: !GetAtt MyKMSKey.Arn
+ Tracing: Active
+ Events:
+ HelloPath:
+ Type: Api # More info about API Event Source: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-function-api.html
+ Properties:
+ Path: /function1024
+ Method: GET
+ # Powertools for AWS Lambda (Python) env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
+ Environment:
+ Variables:
+ POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld
+ POWERTOOLS_METRICS_NAMESPACE: Powertools
+ LOG_LEVEL: INFO
+ KMS_KEY_ARN: !GetAtt MyKMSKey.Arn
+ Tags:
+ LambdaPowertools: python
+ Function1769:
+ Type: AWS::Serverless::Function # More info about Function Resource: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-resource-function.html
+ Properties:
+ Handler: app.lambda_handler
+ CodeUri: function_1769
+ Description: function 1769 MB
+ MemorySize: 1769
+ Architectures:
+ - x86_64
+ Policies:
+ Statement:
+ - Effect: Allow
+ Action: kms:*
+ Resource: !GetAtt MyKMSKey.Arn
+ Tracing: Active
+ Events:
+ HelloPath:
+ Type: Api # More info about API Event Source: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-function-api.html
+ Properties:
+ Path: /function1769
+ Method: GET
+ # Powertools for AWS Lambda (Python) env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables
+ Environment:
+ Variables:
+ POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld
+ POWERTOOLS_METRICS_NAMESPACE: Powertools
+ LOG_LEVEL: INFO
+ KMS_KEY_ARN: !GetAtt MyKMSKey.Arn
+ Tags:
+ LambdaPowertools: python
+
+Outputs:
+ KMSKeyArn:
+ Description: ARN of the KMS Key
+ Value: !GetAtt MyKMSKey.Arn
+
+ 128FunctionApi:
+ Description: API Gateway endpoint URL for Prod environment for Function 128 MB
+ Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/function128"
+
+ 1024FunctionApi:
+ Description: API Gateway endpoint URL for Prod environment for Function 1024 MB
+ Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/function1024"
+
+ 1769FunctionApi:
+ Description: API Gateway endpoint URL for Prod environment for Function 1769 MB
+ Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/function1769"
+
+ Function128:
+ Description: Lambda Function 128 MB ARN
+ Value: !GetAtt Function128.Arn
+
+ Function1024:
+ Description: Lambda Function 1024 MB ARN
+ Value: !GetAtt Function1024.Arn
+
+ Function1769:
+ Description: Lambda Function 1769 MB ARN
+ Value: !GetAtt Function1769.Arn
diff --git a/tests/performance/data_masking/test_perf_data_masking.py b/tests/performance/data_masking/test_perf_data_masking.py
new file mode 100644
index 00000000000..688e36c7a64
--- /dev/null
+++ b/tests/performance/data_masking/test_perf_data_masking.py
@@ -0,0 +1,69 @@
+import importlib
+from types import ModuleType
+
+import pytest
+
+from aws_lambda_powertools.utilities._data_masking.base import DataMasking
+
+DATA_MASKING_PACKAGE = "aws_lambda_powertools.utilities._data_masking"
+DATA_MASKING_INIT_SLA: float = 0.002
+DATA_MASKING_NESTED_ENCRYPT_SLA: float = 0.001
+
+json_blob = {
+ "id": 1,
+ "name": "John Doe",
+ "age": 30,
+ "email": "johndoe@example.com",
+ "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"},
+ "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"],
+ "interests": ["Hiking", "Traveling", "Photography", "Reading"],
+ "job_history": {
+ "company": {
+ "company_name": "Acme Inc.",
+ "company_address": "5678 Interview Dr.",
+ },
+ "position": "Software Engineer",
+ "start_date": "2015-01-01",
+ "end_date": "2017-12-31",
+ },
+ "about_me": """
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis
+ sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus,
+ ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim.
+ Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue,
+ risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin
+ interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat
+ volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat.
+ Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus
+ malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc.
+ """,
+}
+json_blob_fields = ["address.street", "job_history.company.company_name"]
+
+
+def import_data_masking_utility() -> ModuleType:
+    """Dynamically imports and returns the DataMasking module"""
+ return importlib.import_module(DATA_MASKING_PACKAGE)
+
+
+@pytest.mark.perf
+@pytest.mark.benchmark(group="core", disable_gc=True, warmup=False)
+def test_data_masking_init(benchmark):
+ benchmark.pedantic(import_data_masking_utility)
+ stat = benchmark.stats.stats.max
+ if stat > DATA_MASKING_INIT_SLA:
+ pytest.fail(f"High level imports should be below {DATA_MASKING_INIT_SLA}s: {stat}")
+
+
+def mask_json_blob():
+ data_masker = DataMasking()
+ data_masker.mask(json_blob, json_blob_fields)
+
+
+@pytest.mark.perf
+@pytest.mark.benchmark(group="core", disable_gc=True, warmup=False)
+def test_data_masking_encrypt_with_json_blob(benchmark):
+ benchmark.pedantic(mask_json_blob)
+ stat = benchmark.stats.stats.max
+ if stat > DATA_MASKING_NESTED_ENCRYPT_SLA:
+        pytest.fail(f"Masking a nested json blob should be below {DATA_MASKING_NESTED_ENCRYPT_SLA}s: {stat}")
diff --git a/tests/unit/data_classes/test_vpc_lattice_eventv2.py b/tests/unit/data_classes/test_vpc_lattice_eventv2.py
new file mode 100644
index 00000000000..3726831445f
--- /dev/null
+++ b/tests/unit/data_classes/test_vpc_lattice_eventv2.py
@@ -0,0 +1,35 @@
+from aws_lambda_powertools.utilities.data_classes.vpc_lattice import VPCLatticeEventV2
+from tests.functional.utils import load_event
+
+
+def test_vpc_lattice_v2_event():
+ raw_event = load_event("vpcLatticeV2Event.json")
+ parsed_event = VPCLatticeEventV2(raw_event)
+
+ assert parsed_event.path == raw_event["path"]
+ assert parsed_event.get_query_string_value("order-id") == "1"
+ assert parsed_event.get_header_value("user_agent") == "curl/7.64.1"
+ assert parsed_event.decoded_body == '{"message": "Hello from Lambda!"}'
+ assert parsed_event.json_body == {"message": "Hello from Lambda!"}
+ assert parsed_event.method == raw_event["method"]
+ assert parsed_event.headers == raw_event["headers"]
+ assert parsed_event.query_string_parameters == raw_event["queryStringParameters"]
+ assert parsed_event.body == raw_event["body"]
+ assert parsed_event.is_base64_encoded == raw_event["isBase64Encoded"]
+ assert parsed_event.request_context.region == raw_event["requestContext"]["region"]
+ assert parsed_event.request_context.service_network_arn == raw_event["requestContext"]["serviceNetworkArn"]
+ assert parsed_event.request_context.service_arn == raw_event["requestContext"]["serviceArn"]
+ assert parsed_event.request_context.target_group_arn == raw_event["requestContext"]["targetGroupArn"]
+ assert parsed_event.request_context.time_epoch == raw_event["requestContext"]["timeEpoch"]
+ assert (
+ parsed_event.request_context.identity.source_vpc_arn == raw_event["requestContext"]["identity"]["sourceVpcArn"]
+ )
+ assert parsed_event.request_context.identity.get_type == raw_event["requestContext"]["identity"]["type"]
+ assert parsed_event.request_context.identity.principal == raw_event["requestContext"]["identity"]["principal"]
+ assert parsed_event.request_context.identity.session_name == raw_event["requestContext"]["identity"]["sessionName"]
+ assert parsed_event.request_context.identity.x509_san_dns == raw_event["requestContext"]["identity"]["x509SanDns"]
+ assert parsed_event.request_context.identity.x509_issuer_ou is None
+ assert parsed_event.request_context.identity.x509_san_name_cn is None
+ assert parsed_event.request_context.identity.x509_san_uri is None
+ assert parsed_event.request_context.identity.x509_subject_cn is None
+ assert parsed_event.request_context.identity.principal_org_id is None
diff --git a/tests/unit/data_masking/test_unit_data_masking.py b/tests/unit/data_masking/test_unit_data_masking.py
new file mode 100644
index 00000000000..4a92a668d73
--- /dev/null
+++ b/tests/unit/data_masking/test_unit_data_masking.py
@@ -0,0 +1,205 @@
+import json
+
+import pytest
+
+from aws_lambda_powertools.utilities._data_masking.base import DataMasking
+from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING
+
+
+@pytest.fixture
+def data_masker() -> DataMasking:
+ return DataMasking()
+
+
+def test_mask_int(data_masker):
+ # GIVEN an int data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(42)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_float(data_masker):
+ # GIVEN a float data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(4.2)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_bool(data_masker):
+ # GIVEN a bool data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(True)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_none(data_masker):
+ # GIVEN a None data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(None)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_str(data_masker):
+ # GIVEN a str data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask("this is a string")
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_list(data_masker):
+ # GIVEN a list data type
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask([1, 2, "string", 3])
+
+ # THEN the result is the data masked, while maintaining type list
+ assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING]
+
+
+def test_mask_dict(data_masker):
+ # GIVEN a dict data type
+ data = {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN mask is called with no fields argument
+ masked_string = data_masker.mask(data)
+
+ # THEN the result is the data masked
+ assert masked_string == DATA_MASKING_STRING
+
+
+def test_mask_dict_with_fields(data_masker):
+ # GIVEN a dict data type
+ data = {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN mask is called with a list of fields specified
+ masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"])
+
+ # THEN the result is only the specified fields are masked
+ assert masked_string == {
+ "a": {
+ "1": {"None": DATA_MASKING_STRING, "four": "world"},
+ "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}},
+ },
+ }
+
+
+def test_mask_json_dict_with_fields(data_masker):
+ # GIVEN the data type is a json representation of a dictionary
+ data = json.dumps(
+ {
+ "a": {
+ "1": {"None": "hello", "four": "world"},
+ "b": {"3": {"4": "goodbye", "e": "world"}},
+ },
+ },
+ )
+
+ # WHEN mask is called with a list of fields specified
+ masked_json_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"])
+
+ # THEN the result is only the specified fields are masked
+ assert masked_json_string == {
+ "a": {
+ "1": {"None": DATA_MASKING_STRING, "four": "world"},
+ "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}},
+ },
+ }
+
+
+def test_encrypt_not_implemented(data_masker):
+ # GIVEN DataMasking is not initialized with a Provider
+
+ # WHEN attempting to call the encrypt method on the data
+ with pytest.raises(NotImplementedError):
+ # THEN the result is a NotImplementedError
+ data_masker.encrypt("hello world")
+
+
+def test_decrypt_not_implemented(data_masker):
+ # GIVEN DataMasking is not initialized with a Provider
+
+ # WHEN attempting to call the decrypt method on the data
+ with pytest.raises(NotImplementedError):
+ # THEN the result is a NotImplementedError
+ data_masker.decrypt("hello world")
+
+
+def test_parsing_unsupported_data_type(data_masker):
+ # GIVEN an initialization of the DataMasking class
+
+ # WHEN attempting to pass in a list of fields with input data that is not a dict
+ with pytest.raises(TypeError):
+ # THEN the result is a TypeError
+ data_masker.mask(42, ["this.field"])
+
+
+def test_parsing_nonexistent_fields(data_masker):
+ # GIVEN a dict data type
+ data = {
+ "3": {
+ "1": {"None": "hello", "four": "world"},
+ "4": {"33": {"5": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN attempting to pass in fields that do not exist in the input data
+ with pytest.raises(KeyError):
+ # THEN the result is a KeyError
+ data_masker.mask(data, ["3.1.True"])
+
+
+def test_parsing_nonstring_fields(data_masker):
+ # GIVEN a dict data type
+ data = {
+ "3": {
+ "1": {"None": "hello", "four": "world"},
+ "4": {"33": {"5": "goodbye", "e": "world"}},
+ },
+ }
+
+ # WHEN attempting to pass in a list of fields that are not strings
+ masked = data_masker.mask(data, fields=[3.4])
+
+ # THEN the result is the value of the nested field should be masked as normal
+ assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}}
+
+
+def test_parsing_nonstring_keys_and_fields(data_masker):
+ # GIVEN a dict data type with integer keys
+ data = {
+ 3: {
+ "1": {"None": "hello", "four": "world"},
+ 4: {"33": {"5": "goodbye", "e": "world"}},
+ },
+ }
+
+    # WHEN masked with a non-string field (float 3.4) that resolves to the integer keys 3 and 4
+ masked = data_masker.mask(data, fields=[3.4])
+
+ # THEN the result is the value of the nested field should be masked
+ assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}}
diff --git a/tests/unit/parser/test_vpc_latticev2.py b/tests/unit/parser/test_vpc_latticev2.py
new file mode 100644
index 00000000000..78d93fde041
--- /dev/null
+++ b/tests/unit/parser/test_vpc_latticev2.py
@@ -0,0 +1,67 @@
+import pytest
+
+from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse
+from aws_lambda_powertools.utilities.parser.models import VpcLatticeV2Model
+from tests.functional.utils import load_event
+from tests.unit.parser.schemas import MyVpcLatticeBusiness
+
+
+def test_vpc_lattice_v2_event_with_envelope():
+ raw_event = load_event("vpcLatticeV2Event.json")
+ raw_event["body"] = '{"username": "Stephen", "name": "Bawks"}'
+ parsed_event: MyVpcLatticeBusiness = parse(
+ event=raw_event,
+ model=MyVpcLatticeBusiness,
+ envelope=envelopes.VpcLatticeV2Envelope,
+ )
+
+ assert parsed_event.username == "Stephen"
+ assert parsed_event.name == "Bawks"
+
+
+def test_vpc_lattice_v2_event():
+ raw_event = load_event("vpcLatticeV2Event.json")
+ model = VpcLatticeV2Model(**raw_event)
+
+ assert model.body == raw_event["body"]
+ assert model.method == raw_event["method"]
+ assert model.path == raw_event["path"]
+ assert model.is_base64_encoded == raw_event["isBase64Encoded"]
+ assert model.headers == raw_event["headers"]
+ assert model.query_string_parameters == raw_event["queryStringParameters"]
+ assert model.request_context.region == raw_event["requestContext"]["region"]
+ assert model.request_context.service_network_arn == raw_event["requestContext"]["serviceNetworkArn"]
+ assert model.request_context.service_arn == raw_event["requestContext"]["serviceArn"]
+ assert model.request_context.target_group_arn == raw_event["requestContext"]["targetGroupArn"]
+ assert model.request_context.time_epoch == float(raw_event["requestContext"]["timeEpoch"])
+ convert_time = int((model.request_context.time_epoch_as_datetime.timestamp() * 1000))
+ event_converted_time = round(int(raw_event["requestContext"]["timeEpoch"]) / 1000)
+ assert convert_time == event_converted_time
+ assert model.request_context.identity.source_vpc_arn == raw_event["requestContext"]["identity"]["sourceVpcArn"]
+ assert model.request_context.identity.get_type == raw_event["requestContext"]["identity"]["type"]
+ assert model.request_context.identity.principal == raw_event["requestContext"]["identity"]["principal"]
+ assert model.request_context.identity.session_name == raw_event["requestContext"]["identity"]["sessionName"]
+ assert model.request_context.identity.x509_san_dns == raw_event["requestContext"]["identity"]["x509SanDns"]
+ assert model.request_context.identity.x509_issuer_ou is None
+ assert model.request_context.identity.x509_san_name_cn is None
+ assert model.request_context.identity.x509_san_uri is None
+ assert model.request_context.identity.x509_subject_cn is None
+ assert model.request_context.identity.principal_org_id is None
+
+
+def test_vpc_lattice_v2_event_custom_model():
+ class MyCustomResource(VpcLatticeV2Model):
+ body: str
+
+ raw_event = load_event("vpcLatticeV2Event.json")
+ model = MyCustomResource(**raw_event)
+
+ assert model.body == raw_event["body"]
+
+
+def test_vpc_lattice_v2_event_invalid():
+ raw_event = load_event("vpcLatticeV2Event.json")
+ raw_event["body"] = ["some_more_data"]
+
+ with pytest.raises(ValidationError):
+ VpcLatticeV2Model(**raw_event)