From a264bc3c845363b518a4a851efcb3bb3e12dbada Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 May 2023 01:10:09 +0100 Subject: [PATCH 01/76] chore(deps-dev): bump aws-cdk from 2.77.0 to 2.78.0 (#2202) --- package-lock.json | 14 +++++++------- package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/package-lock.json b/package-lock.json index 944c28b98a2..37bee4fb126 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,13 +8,13 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.77.0" + "aws-cdk": "^2.78.0" } }, "node_modules/aws-cdk": { - "version": "2.77.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.77.0.tgz", - "integrity": "sha512-f0UpWjBxrFkINqlwL50OpIIC03V39hTzg4+NEBlfUc/ftFX8WQQYyT6h29IfxT9Tgo+YoEMlM1nnH/s1c+VKSw==", + "version": "2.78.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.78.0.tgz", + "integrity": "sha512-fDqImTHefBjr8RYduO0bQRkINYJRGKdTXABeLsaMP6Ff4qDXTymaplyvUxNSB9DlQ+oXQ/aJgqFGiIJXSr+kpg==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -43,9 +43,9 @@ }, "dependencies": { "aws-cdk": { - "version": "2.77.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.77.0.tgz", - "integrity": "sha512-f0UpWjBxrFkINqlwL50OpIIC03V39hTzg4+NEBlfUc/ftFX8WQQYyT6h29IfxT9Tgo+YoEMlM1nnH/s1c+VKSw==", + "version": "2.78.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.78.0.tgz", + "integrity": "sha512-fDqImTHefBjr8RYduO0bQRkINYJRGKdTXABeLsaMP6Ff4qDXTymaplyvUxNSB9DlQ+oXQ/aJgqFGiIJXSr+kpg==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index 0a074487100..18f7afaf4bd 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,6 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.77.0" + "aws-cdk": "^2.78.0" } } From 70796865bb816031da697879b7e4d6ccc3746539 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 5 May 2023 12:01:11 +0100 Subject: [PATCH 02/76] adding capitalone --- README.md | 1 + docs/index.md | 1 + 2 files changed, 2 insertions(+) diff --git a/README.md b/README.md index 310c267af6e..ee025f52261 100644 --- a/README.md +++ b/README.md @@ -53,6 +53,7 @@ Knowing which companies are using this library is important to help prioritize t The following companies, among others, use Powertools: +* [Capital One](https://www.capitalone.com/) * [CPQi (Exadel Financial Services)](https://cpqi.com/) * [CloudZero](https://www.cloudzero.com/) * [CyberArk](https://www.cyberark.com/) diff --git a/docs/index.md b/docs/index.md index 2aafa480f07..2d64d574215 100644 --- a/docs/index.md +++ b/docs/index.md @@ -745,6 +745,7 @@ Knowing which companies are using this library is important to help prioritize t The following companies, among others, use Powertools: +* [Capital One](https://www.capitalone.com/){target="_blank"} * [CPQi (Exadel Financial Services)](https://cpqi.com/){target="_blank"} * [CloudZero](https://www.cloudzero.com/){target="_blank"} * [CyberArk](https://www.cyberark.com/){target="_blank"} From 0ab3b705ffc07eb30184f94ebdfae678afa574d6 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 5 May 2023 11:03:08 +0000 Subject: [PATCH 03/76] update changelog with latest changes --- CHANGELOG.md | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 541e8f3e0ff..d844c72505a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ 
-4,6 +4,13 @@ # Unreleased +## Maintenance + +* **deps-dev:** bump aws-cdk from 2.77.0 to 2.78.0 ([#2202](https://github.com/awslabs/aws-lambda-powertools-python/issues/2202)) + + + +## [v2.15.0] - 2023-05-04 ## Bug Fixes * typo @@ -31,20 +38,22 @@ ## Maintenance +* update v2 layer ARN on documentation * add dummy reusable dispatch analytics job +* **ci:** remove build step from release env; no more secrets needed * **ci:** use new pypi trusted publisher for increased security ([#2198](https://github.com/awslabs/aws-lambda-powertools-python/issues/2198)) * **deps:** bump pypa/gh-action-pypi-publish from 1.8.5 to 1.8.6 ([#2201](https://github.com/awslabs/aws-lambda-powertools-python/issues/2201)) -* **deps-dev:** bump mkdocs-material from 9.1.8 to 9.1.9 ([#2190](https://github.com/awslabs/aws-lambda-powertools-python/issues/2190)) +* **deps-dev:** bump cfn-lint from 0.77.3 to 0.77.4 ([#2178](https://github.com/awslabs/aws-lambda-powertools-python/issues/2178)) * **deps-dev:** bump types-requests from 2.28.11.17 to 2.29.0.0 ([#2187](https://github.com/awslabs/aws-lambda-powertools-python/issues/2187)) * **deps-dev:** bump coverage from 7.2.4 to 7.2.5 ([#2186](https://github.com/awslabs/aws-lambda-powertools-python/issues/2186)) -* **deps-dev:** bump coverage from 7.2.3 to 7.2.4 ([#2179](https://github.com/awslabs/aws-lambda-powertools-python/issues/2179)) +* **deps-dev:** bump mkdocs-material from 9.1.8 to 9.1.9 ([#2190](https://github.com/awslabs/aws-lambda-powertools-python/issues/2190)) * **deps-dev:** bump importlib-metadata from 6.5.0 to 6.6.0 ([#2163](https://github.com/awslabs/aws-lambda-powertools-python/issues/2163)) * **deps-dev:** bump mypy-boto3-xray from 1.26.11.post1 to 1.26.122 ([#2173](https://github.com/awslabs/aws-lambda-powertools-python/issues/2173)) * **deps-dev:** bump aws-cdk from 2.76.0 to 2.77.0 ([#2174](https://github.com/awslabs/aws-lambda-powertools-python/issues/2174)) * **deps-dev:** bump mypy-boto3-lambda from 1.26.115 to 1.26.122 ([#2172](https://github.com/awslabs/aws-lambda-powertools-python/issues/2172)) * **deps-dev:** bump cfn-lint from 0.77.2 to 0.77.3 ([#2165](https://github.com/awslabs/aws-lambda-powertools-python/issues/2165)) * **deps-dev:** bump mkdocs-material from 9.1.6 to 9.1.8 ([#2162](https://github.com/awslabs/aws-lambda-powertools-python/issues/2162)) -* **deps-dev:** bump cfn-lint from 0.77.3 to 0.77.4 ([#2178](https://github.com/awslabs/aws-lambda-powertools-python/issues/2178)) +* **deps-dev:** bump coverage from 7.2.3 to 7.2.4 ([#2179](https://github.com/awslabs/aws-lambda-powertools-python/issues/2179)) * **governance:** add Lambda Powertools for .NET in issue templates ([#2196](https://github.com/awslabs/aws-lambda-powertools-python/issues/2196)) @@ -3211,7 +3220,8 @@ * Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38 -[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.14.1...HEAD +[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.15.0...HEAD +[v2.15.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.14.1...v2.15.0 [v2.14.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.14.0...v2.14.1 [v2.14.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.13.0...v2.14.0 [v2.13.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.12.0...v2.13.0 From b7acc217270738e480ff16e9acc99dfda74f5a97 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 5 May
2023 15:42:24 +0200 Subject: [PATCH 04/76] chore(ci): enforce zero trust for third party workflows (#2215) Co-authored-by: Leandro Damascena --- .github/workflows/auto-merge.yml | 39 ------------------- .github/workflows/codeql-analysis.yml | 6 +-- .github/workflows/dispatch_analytics.yml | 2 +- .github/workflows/label_pr_on_title.yml | 4 +- .github/workflows/on_closed_issues.yml | 2 +- .github/workflows/on_label_added.yml | 4 +- .github/workflows/on_merged_pr.yml | 4 +- .github/workflows/on_opened_pr.yml | 8 ++-- .github/workflows/publish_v2_layer.yml | 8 ++-- .github/workflows/python_build.yml | 4 +- .github/workflows/record_pr.yml | 6 +-- .github/workflows/release.yml | 10 ++--- .../reusable_deploy_v2_layer_stack.yml | 12 +++--- .github/workflows/reusable_deploy_v2_sar.yml | 10 ++--- .../workflows/reusable_export_pr_details.yml | 16 ++++---- .../workflows/reusable_publish_changelog.yml | 2 +- .github/workflows/reusable_publish_docs.yml | 4 +- .../reusable_update_v2_layer_arn_docs.yml | 4 +- .github/workflows/run-e2e-tests.yml | 8 ++-- .github/workflows/secure_workflows.yml | 15 +------ 20 files changed, 58 insertions(+), 110 deletions(-) delete mode 100644 .github/workflows/auto-merge.yml diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml deleted file mode 100644 index b2a3d23bd9b..00000000000 --- a/.github/workflows/auto-merge.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: auto-merge - -on: - pull_request: - types: [opened, edited, synchronize] - -permissions: - contents: write - -jobs: - dependabot: - runs-on: ubuntu-latest - if: ${{ github.actor == 'dependabot[bot]' }} - steps: - - name: Dependabot metadata - id: metadata - uses: dependabot/fetch-metadata@v1.4.0 - with: - github-token: "${{ secrets.GITHUB_TOKEN }}" - - name: Enable auto-merge for mypy-boto3 stubs Dependabot PRs - if: ${{ contains(steps.metadata.outputs.dependency-names, 'mypy-boto3') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} - run: gh pr merge --auto --squash "$PR_URL" - env: - PR_URL: ${{github.event.pull_request.html_url}} - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - # Maintenance: Experiment with literal array (toJSON('["dep1", "dep2"]')) to ease extending it - - name: Enable auto-merge for CDK Construct Lambda Layer Dependabot PRs - if: ${{ contains(steps.metadata.outputs.dependency-names, 'cdk-lambda-powertools-python-layer') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} - run: gh pr merge --auto --squash "$PR_URL" - env: - PR_URL: ${{github.event.pull_request.html_url}} - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - # Maintenance: Revisit if CDK Constructs make breaking changes like CDK v1 - - name: Enable auto-merge for CDK Lib Construct - if: ${{ contains(steps.metadata.outputs.dependency-names, 'aws-cdk-lib') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} - run: gh pr merge --auto --squash "$PR_URL" - env: - PR_URL: ${{github.event.pull_request.html_url}} - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index d70a5c024e7..31561d013ad 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -23,11 +23,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 # Initializes the CodeQL tools for scanning. 
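# Note: every third-party action below is pinned to a full commit SHA; the trailing "# vX.Y.Z" comment records the human-readable release it maps to, since tags can be moved but SHAs cannot.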
- name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@f31a31c052207cc13b328d6295c5b728bb49568c # v2.13.1 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -36,4 +36,4 @@ jobs: # queries: ./path/to/local/query, your-org/your-repo/queries@main - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@f31a31c052207cc13b328d6295c5b728bb49568c # v2.13.1 diff --git a/.github/workflows/dispatch_analytics.yml b/.github/workflows/dispatch_analytics.yml index 49a276f6f61..b5bf7ea8ff5 100644 --- a/.github/workflows/dispatch_analytics.yml +++ b/.github/workflows/dispatch_analytics.yml @@ -29,7 +29,7 @@ jobs: environment: analytics steps: - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 with: aws-region: eu-central-1 role-to-assume: ${{ secrets.AWS_ANALYTICS_ROLE_ARN }} diff --git a/.github/workflows/label_pr_on_title.yml b/.github/workflows/label_pr_on_title.yml index 3815a49e9bd..0183cb1155d 100644 --- a/.github/workflows/label_pr_on_title.yml +++ b/.github/workflows/label_pr_on_title.yml @@ -22,9 +22,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: "Label PR based on title" - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 env: PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} PR_TITLE: ${{ needs.get_pr_details.outputs.prTitle }} diff --git a/.github/workflows/on_closed_issues.yml b/.github/workflows/on_closed_issues.yml index ca815e4c07f..dfe854c5fbe 100644 --- a/.github/workflows/on_closed_issues.yml +++ b/.github/workflows/on_closed_issues.yml @@ -6,7 +6,7 @@ jobs: auto_comment: runs-on: ubuntu-latest steps: - - uses: aws-actions/closed-issue-message@v1 + - uses: aws-actions/closed-issue-message@8b6324312193476beecf11f8e8539d73a3553bf4 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" message: | diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml index e9180d8010a..88ca45439bd 100644 --- a/.github/workflows/on_label_added.yml +++ b/.github/workflows/on_label_added.yml @@ -23,10 +23,10 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 # Maintenance: Persist state per PR as an artifact to avoid spam on label add - name: "Suggest split large Pull Request" - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 env: PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} PR_ACTION: ${{ needs.get_pr_details.outputs.prAction }} diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index cd97e1c306e..a4e8bf6d28d 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -20,9 +20,9 @@ jobs: runs-on: ubuntu-latest if: needs.get_pr_details.outputs.prIsMerged == 'true' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: "Label PR related issue for release" - uses: actions/github-script@v6 + uses: 
actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 env: PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index 043ff9628cd..58f580e13af 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -19,9 +19,9 @@ jobs: needs: get_pr_details runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: "Ensure related issue is present" - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 env: PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} @@ -36,9 +36,9 @@ jobs: needs: get_pr_details runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: "Ensure acknowledgement section is present" - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 env: PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml index 8d8a8c34aae..7820678e813 100644 --- a/.github/workflows/publish_v2_layer.yml +++ b/.github/workflows/publish_v2_layer.yml @@ -38,17 +38,17 @@ jobs: working-directory: ./layer steps: - name: checkout - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: fetch-depth: 0 - name: Install poetry run: pipx install poetry - name: Setup Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 with: node-version: "16.12" - name: Setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 with: python-version: "3.10" cache: "pip" @@ -80,7 +80,7 @@ jobs: - name: zip output run: zip -r cdk.out.zip cdk.out - name: Archive CDK artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 with: name: cdk-layer-artefact path: layer/cdk.out.zip diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 5daf9d5c2d3..9a6706caed9 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -32,11 +32,11 @@ jobs: env: PYTHON: "${{ matrix.python-version }}" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Install poetry run: pipx install poetry - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 with: python-version: ${{ matrix.python-version }} cache: "poetry" diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml index b1638ad8865..20cd93b897f 100644 --- a/.github/workflows/record_pr.yml +++ b/.github/workflows/record_pr.yml @@ -9,14 +9,14 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: "Extract PR details" - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: 
script: | const script = require('.github/scripts/save_pr_details.js') await script({github, context, core}) - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 with: name: pr path: pr.txt diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8d965a4af4c..38aadc4d873 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -57,13 +57,13 @@ jobs: env: RELEASE_TAG_VERSION: ${{ inputs.version_to_publish }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: fetch-depth: 0 - name: Install poetry run: pipx install poetry - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 with: python-version: "3.10" cache: "poetry" @@ -124,7 +124,7 @@ jobs: env: RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Restore release artifact from cache id: restore-release-build uses: actions/cache/restore@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 @@ -174,9 +174,9 @@ jobs: env: RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Close issues related to this release - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml index 5af5d6385d0..912849c2e2c 100644 --- a/.github/workflows/reusable_deploy_v2_layer_stack.yml +++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml @@ -93,20 +93,20 @@ jobs: has_arm64_support: "true" steps: - name: checkout - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Install poetry run: pipx install poetry - name: aws credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 with: aws-region: ${{ matrix.region }} role-to-assume: ${{ secrets.AWS_LAYERS_ROLE_ARN }} - name: Setup Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 with: node-version: "16.12" - name: Setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 with: python-version: "3.10" cache: "pip" @@ -124,7 +124,7 @@ jobs: - name: install deps run: poetry install - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 with: name: ${{ inputs.artefact-name }} path: layer @@ -141,7 +141,7 @@ jobs: cat cdk-layer-stack/${{ matrix.region }}-layer-version.txt - name: Save Layer ARN artifact if: ${{ inputs.stage == 'PROD' }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 with: name: cdk-layer-stack path: ./layer/cdk-layer-stack/* # NOTE: upload-artifact does not inherit working-directory setting. 
diff --git a/.github/workflows/reusable_deploy_v2_sar.yml b/.github/workflows/reusable_deploy_v2_sar.yml index b9416c5f94d..4ca28543f24 100644 --- a/.github/workflows/reusable_deploy_v2_sar.yml +++ b/.github/workflows/reusable_deploy_v2_sar.yml @@ -48,14 +48,14 @@ jobs: architecture: ["x86_64", "arm64"] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 with: aws-region: ${{ env.AWS_REGION }} role-to-assume: ${{ secrets.AWS_LAYERS_ROLE_ARN }} - name: AWS credentials SAR role - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 id: aws-credentials-sar-role with: aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} @@ -65,11 +65,11 @@ jobs: aws-region: ${{ env.AWS_REGION }} role-to-assume: ${{ secrets.AWS_SAR_V2_ROLE_ARN }} - name: Setup Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 with: node-version: ${{ env.NODE_VERSION }} - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 with: name: ${{ inputs.artefact-name }} - name: Unzip artefact diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml index 2a1b45331d1..39ce20672c6 100644 --- a/.github/workflows/reusable_export_pr_details.yml +++ b/.github/workflows/reusable_export_pr_details.yml @@ -53,9 +53,9 @@ jobs: prIsMerged: ${{ steps.prIsMerged.outputs.prIsMerged }} steps: - name: Checkout repository # in case caller workflow doesn't checkout thus failing with file not found - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: "Download previously saved PR" - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 env: WORKFLOW_ID: ${{ inputs.record_pr_workflow_id }} # For security, we only download artifacts tied to the successful PR recording workflow @@ -71,19 +71,19 @@ jobs: # otherwise the parent caller won't see them regardless on how outputs are set. 
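# Each export step below reads one field from the saved PR event JSON with jq and appends a key=value pair to $GITHUB_OUTPUT, so the caller workflow can consume it as a job output.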
- name: "Export Pull Request Number" id: prNumber - run: echo "prNumber=$(jq -c '.number' ${FILENAME})" >> $GITHUB_OUTPUT + run: echo prNumber="$(jq -c '.number' "${FILENAME}")" >> "$GITHUB_OUTPUT" - name: "Export Pull Request Title" id: prTitle - run: echo "prTitle=$(jq -c '.pull_request.title' ${FILENAME})" >> $GITHUB_OUTPUT + run: echo prTitle="$(jq -c '.pull_request.title' "${FILENAME}")" >> "$GITHUB_OUTPUT" - name: "Export Pull Request Body" id: prBody - run: echo "prBody=$(jq -c '.pull_request.body' ${FILENAME})" >> $GITHUB_OUTPUT + run: echo prBody="$(jq -c '.pull_request.body' "${FILENAME}")" >> "$GITHUB_OUTPUT" - name: "Export Pull Request Author" id: prAuthor - run: echo "prAuthor=$(jq -c '.pull_request.user.login' ${FILENAME})" >> $GITHUB_OUTPUT + run: echo prAuthor="$(jq -c '.pull_request.user.login' "${FILENAME}")" >> "$GITHUB_OUTPUT" - name: "Export Pull Request Action" id: prAction - run: echo "prAction=$(jq -c '.action' ${FILENAME})" >> $GITHUB_OUTPUT + run: echo prAction="$(jq -c '.action' "${FILENAME}")" >> "$GITHUB_OUTPUT" - name: "Export Pull Request Merged status" id: prIsMerged - run: echo "prIsMerged=$(jq -c '.pull_request.merged' ${FILENAME})" >> $GITHUB_OUTPUT + run: echo prIsMerged="$(jq -c '.pull_request.merged' "${FILENAME}")" >> "$GITHUB_OUTPUT" diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml index 30e49b98ae5..2e038eae924 100644 --- a/.github/workflows/reusable_publish_changelog.yml +++ b/.github/workflows/reusable_publish_changelog.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository # reusable workflows start clean, so we need to checkout again - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: fetch-depth: 0 - name: Git client setup and refresh tip diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml index 9be91b212bf..9359229230f 100644 --- a/.github/workflows/reusable_publish_docs.yml +++ b/.github/workflows/reusable_publish_docs.yml @@ -33,13 +33,13 @@ jobs: group: on-docs-rebuild runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: fetch-depth: 0 - name: Install poetry run: pipx install poetry - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 with: python-version: "3.10" cache: "poetry" diff --git a/.github/workflows/reusable_update_v2_layer_arn_docs.yml b/.github/workflows/reusable_update_v2_layer_arn_docs.yml index 142d0a32e75..5fbf6814dcd 100644 --- a/.github/workflows/reusable_update_v2_layer_arn_docs.yml +++ b/.github/workflows/reusable_update_v2_layer_arn_docs.yml @@ -23,7 +23,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository # reusable workflows start clean, so we need to checkout again - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: fetch-depth: 0 - name: Git client setup and refresh tip @@ -34,7 +34,7 @@ jobs: git config remote.origin.url >&- || git remote add origin https://github.com/"${origin}" # Git Detached mode (release notes) doesn't have origin git pull origin "${BRANCH}" - name: Download CDK layer artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 with: name: cdk-layer-stack path: cdk-layer-stack/ diff --git 
a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 2f7b2f494ea..e3305114555 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -34,17 +34,17 @@ jobs: if: ${{ github.actor != 'dependabot[bot]' }} steps: - name: "Checkout" - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Install poetry run: pipx install poetry - name: "Use Python" - uses: actions/setup-python@v4 + uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 with: python-version: ${{ matrix.version }} architecture: "x64" cache: "poetry" - name: Setup Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0 with: node-version: "16.12" - name: Install CDK CLI @@ -54,7 +54,7 @@ jobs: - name: Install dependencies run: make dev - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 with: role-to-assume: ${{ secrets.AWS_TEST_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} diff --git a/.github/workflows/secure_workflows.yml b/.github/workflows/secure_workflows.yml index b1b5cfc0c46..dc7f766b29a 100644 --- a/.github/workflows/secure_workflows.yml +++ b/.github/workflows/secure_workflows.yml @@ -14,19 +14,6 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Ensure 3rd party workflows have SHA pinned uses: zgosalvez/github-actions-ensure-sha-pinned-actions@21991cec25093947ff3f62e4c223df0260c39944 # v2.1.2 - with: - # Trusted GitHub Actions and/or organizations - allowlist: | - aws-actions/ - actions/checkout - actions/github-script - actions/setup-node - actions/setup-python - actions/upload-artifact - actions/download-artifact - github/codeql-action/init - github/codeql-action/analyze - dependabot/fetch-metadata From 8b81fb07af5895bad75839ab7e1d73133aecd031 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 5 May 2023 16:04:39 +0200 Subject: [PATCH 05/76] chore(ci): remove auto-merge workflow (#2214) From 3143a0865e0b3f411f03382eda8587478a812bbb Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 5 May 2023 16:26:36 +0200 Subject: [PATCH 06/76] chore(ci): schedule changelog to rebuild daily at 8am, and on release only (#2216) --- .github/workflows/build_changelog.yml | 11 +++++++++++ .github/workflows/on_push_docs.yml | 16 +--------------- .github/workflows/rebuild_latest_docs.yml | 6 ------ 3 files changed, 12 insertions(+), 21 deletions(-) diff --git a/.github/workflows/build_changelog.yml b/.github/workflows/build_changelog.yml index 3cd6fffe855..ebc978022bc 100644 --- a/.github/workflows/build_changelog.yml +++ b/.github/workflows/build_changelog.yml @@ -3,6 +3,17 @@ name: Build changelog on: workflow_dispatch: + schedule: + # ┌───────────── minute (0 - 59) + # │ ┌───────────── hour (0 - 23) + # │ │ ┌───────────── day of the month (1 - 31) + # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC) + # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT) + # │ │ │ │ │ + # │ │ │ │ │ + # │ │ │ │ │ + # * * * * * + - cron: '0 8 * * *' jobs: changelog: diff --git a/.github/workflows/on_push_docs.yml b/.github/workflows/on_push_docs.yml index d46879ca6b1..340f669b0f7 100644 --- a/.github/workflows/on_push_docs.yml +++ 
b/.github/workflows/on_push_docs.yml @@ -8,15 +8,10 @@ on: - "docs/**" - "mkdocs.yml" - "examples/**" + - "CHANGELOG.md" jobs: - changelog: - permissions: - contents: write - uses: ./.github/workflows/reusable_publish_changelog.yml - release-docs: - needs: changelog permissions: contents: write pages: write @@ -24,12 +19,3 @@ jobs: with: version: develop alias: stage -# Maintenance: Only necessary in repo migration -# - name: Create redirect from old docs -# run: | -# git checkout gh-pages -# test -f 404.html && echo "Redirect already set" && exit 0 -# git checkout develop -- 404.html -# git add 404.html -# git commit -m "chore: set docs redirect" --no-verify -# git push origin gh-pages -f diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index aa3b2216289..3e481860992 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -15,13 +15,7 @@ on: required: true jobs: - changelog: - permissions: - contents: write - uses: ./.github/workflows/reusable_publish_changelog.yml - release-docs: - needs: changelog permissions: contents: write pages: write From e76e7a2ab3f79258fe161d2307a01a9c7158fea3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 May 2023 23:41:17 +0100 Subject: [PATCH 07/76] chore(deps-dev): bump mypy-boto3-s3 from 1.26.116 to 1.26.127 (#2218) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index c0ea10b2660..e4d6d9b3e21 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1780,14 +1780,14 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-s3" -version = "1.26.116" -description = "Type annotations for boto3.S3 1.26.116 service generated with mypy-boto3-builder 7.14.5" +version = "1.26.127" +description = "Type annotations for boto3.S3 1.26.127 service generated with mypy-boto3-builder 7.14.5" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.26.116.tar.gz", hash = "sha256:597aac58bb2c962d166403d0bdc10cdfa62ac82c61b02faf69a461c5a5107087"}, - {file = "mypy_boto3_s3-1.26.116-py3-none-any.whl", hash = "sha256:dcdab86eae381c15b872c020e6b0d01ecaee4092190b60e313fac180b243e66a"}, + {file = "mypy-boto3-s3-1.26.127.tar.gz", hash = "sha256:0e548b97c6a2589f7bff5d26a1ca101622749771379226e3ad0822629d0613c5"}, + {file = "mypy_boto3_s3-1.26.127-py3-none-any.whl", hash = "sha256:21e647caa18d98dbbc706597c9b27d41674f18850f42b2cfdb9a39b39820e470"}, ] [package.dependencies] @@ -3035,4 +3035,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "302a00b68b6e28a6fd1258bb3c093daca2547a71586b0b44a5e162351b39bfe2" +content-hash = "489ea28b889343a7c9d138c04816ab162c08a66f35aee19ec724cc20845ce197" diff --git a/pyproject.toml b/pyproject.toml index 365cc2e80d0..a280f47f021 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 +78,7 @@ mypy-boto3-lambda = "^1.26.122" mypy-boto3-logs = "^1.26.53" mypy-boto3-secretsmanager = "^1.26.116" mypy-boto3-ssm = "^1.26.97" -mypy-boto3-s3 = "^1.26.116" +mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" types-requests = "^2.29.0" typing-extensions = "^4.4.0" From 8a67bf42d4c76cb52563a531ca617a3cd39ca549 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 May 2023 23:46:21 +0100 Subject: [PATCH 08/76] chore(deps-dev): bump types-requests 
from 2.29.0.0 to 2.30.0.0 (#2220) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index e4d6d9b3e21..9d568e01a77 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2798,18 +2798,18 @@ files = [ [[package]] name = "types-requests" -version = "2.29.0.0" +version = "2.30.0.0" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.29.0.0.tar.gz", hash = "sha256:c86f4a955d943d2457120dbe719df24ef0924e11177164d10a0373cf311d7b4d"}, - {file = "types_requests-2.29.0.0-py3-none-any.whl", hash = "sha256:4cf6e323e856c779fbe8815bb977a5bf5d6c5034713e4c17ff2a9a20610f5b27"}, + {file = "types-requests-2.30.0.0.tar.gz", hash = "sha256:dec781054324a70ba64430ae9e62e7e9c8e4618c185a5cb3f87a6738251b5a31"}, + {file = "types_requests-2.30.0.0-py3-none-any.whl", hash = "sha256:c6cf08e120ca9f0dc4fa4e32c3f953c3fba222bcc1db6b97695bce8da1ba9864"}, ] [package.dependencies] -types-urllib3 = "<1.27" +types-urllib3 = "*" [[package]] name = "types-urllib3" @@ -3035,4 +3035,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "489ea28b889343a7c9d138c04816ab162c08a66f35aee19ec724cc20845ce197" +content-hash = "38dcaa8de56f68e1cc7dc3c9139ab4eb7efa6fa98b02ec8c171f1be9e9b052dd" diff --git a/pyproject.toml b/pyproject.toml index a280f47f021..66b0df6a5fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ mypy-boto3-secretsmanager = "^1.26.116" mypy-boto3-ssm = "^1.26.97" mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" -types-requests = "^2.29.0" +types-requests = "^2.30.0" typing-extensions = "^4.4.0" mkdocs-material = "^9.1.9" filelock = "^3.12.0" From 3f191c2f5e492ed90c09111df04aae10a83334c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 May 2023 23:53:01 +0100 Subject: [PATCH 09/76] chore(deps-dev): bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 (#2219) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9d568e01a77..ee953be2792 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1720,14 +1720,14 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-cloudwatch" -version = "1.26.99" -description = "Type annotations for boto3.CloudWatch 1.26.99 service generated with mypy-boto3-builder 7.14.2" +version = "1.26.127" +description = "Type annotations for boto3.CloudWatch 1.26.127 service generated with mypy-boto3-builder 7.14.5" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudwatch-1.26.99.tar.gz", hash = "sha256:ada2b942a88cc6fd307fef0fcb40d0765d3971e33e0c6f92a127d5abcfea1fdb"}, - {file = "mypy_boto3_cloudwatch-1.26.99-py3-none-any.whl", hash = "sha256:98b4b2d6363c17ab5a4c1dee2ee1da2d4b52333f0cae999b8145a0c28a86e61e"}, + {file = "mypy-boto3-cloudwatch-1.26.127.tar.gz", hash = "sha256:08281b75414293f112135eaaa9937ab2621e1db0762285a1face66ab937b4e0f"}, + {file = "mypy_boto3_cloudwatch-1.26.127-py3-none-any.whl", hash = "sha256:e2da3e956b1c67a3bc4374ced3331fff97172f98d63e56efea805dc1e5fa28e9"}, ] [package.dependencies] @@ -3035,4 +3035,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "38dcaa8de56f68e1cc7dc3c9139ab4eb7efa6fa98b02ec8c171f1be9e9b052dd" 
+content-hash = "72cae7a8316616aa14416d3fdee9a39e6822d161d508ed869da0b345de388402" diff --git a/pyproject.toml b/pyproject.toml index 66b0df6a5fd..700c2afc0e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,7 +72,7 @@ pytest-benchmark = "^4.0.0" python-snappy = "^0.6.1" mypy-boto3-appconfig = "^1.26.71" mypy-boto3-cloudformation = "^1.26.108" -mypy-boto3-cloudwatch = "^1.26.99" +mypy-boto3-cloudwatch = "^1.26.127" mypy-boto3-dynamodb = "^1.26.115" mypy-boto3-lambda = "^1.26.122" mypy-boto3-logs = "^1.26.53" From 9da2ef0c6ac4355b0408607ca9198866c850d9b6 Mon Sep 17 00:00:00 2001 From: Release bot Date: Sat, 6 May 2023 08:02:00 +0000 Subject: [PATCH 10/76] update changelog with latest changes --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d844c72505a..23bb8186ed5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,12 @@ ## Maintenance +* **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) +* **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) +* **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) +* **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) +* **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) +* **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) * **deps-dev:** bump aws-cdk from 2.77.0 to 2.78.0 ([#2202](https://github.com/awslabs/aws-lambda-powertools-python/issues/2202)) From d599649ad51899ac1f6e3ca780849a0a076e9f21 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 8 May 2023 14:22:36 +0200 Subject: [PATCH 11/76] chore(ci): create pull request on changelog update (#2224) Co-authored-by: Leandro Damascena --- .../scripts/create_pr_for_staged_changes.sh | 116 ++++++++++++++++++ .../workflows/reusable_publish_changelog.yml | 27 ++-- 2 files changed, 132 insertions(+), 11 deletions(-) create mode 100644 .github/scripts/create_pr_for_staged_changes.sh diff --git a/.github/scripts/create_pr_for_staged_changes.sh b/.github/scripts/create_pr_for_staged_changes.sh new file mode 100644 index 00000000000..a35d45cc9e9 --- /dev/null +++ b/.github/scripts/create_pr_for_staged_changes.sh @@ -0,0 +1,116 @@ +#!/bin/bash +set -uxo pipefail # enable debugging, prevent accessing unset env vars, prevent masking pipeline errors to the next command + +#docs +#title :create_pr_for_staged_changes.sh +#description :This script will create a PR for staged changes and detect and close duplicate PRs. +#author :@heitorlessa +#date :May 8th 2023 +#version :0.1 +#usage :bash create_pr_for_staged_changes.sh {git_staged_files_or_directories_separated_by_space} +#notes :Meant to use in GitHub Actions only. 
Temporary branch will be named $TEMP_BRANCH_PREFIX-$GITHUB_RUN_ID +#os_version :Ubuntu 22.04.2 LTS +#required_env_vars :COMMIT_MSG, PR_TITLE, TEMP_BRANCH_PREFIX, GH_TOKEN, GITHUB_RUN_ID, GITHUB_SERVER_URL, GITHUB_REPOSITORY +#============================================================================== + +PR_BODY="This is an automated PR created from the following workflow" +FILENAME=".github/scripts/$(basename "$0")" +readonly PR_BODY +readonly FILENAME + +# Sets GitHub Action with error message to ease troubleshooting +function raise_validation_error() { + echo "::error file=${FILENAME}::$1" + exit 1 +} + +function debug() { + echo "::debug::$1" +} + +function notice() { + echo "::notice file=${FILENAME}::$1" +} + +function has_required_config() { + # Default GitHub Actions Env Vars: https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables + debug "Do we have required environment variables?" + test -z "${TEMP_BRANCH_PREFIX}" && raise_validation_error "TEMP_BRANCH_PREFIX env must be set to create a PR" + test -z "${GH_TOKEN}" && raise_validation_error "GH_TOKEN env must be set for GitHub CLI" + test -z "${COMMIT_MSG}" && raise_validation_error "COMMIT_MSG env must be set" + test -z "${PR_TITLE}" && raise_validation_error "PR_TITLE env must be set" + test -z "${GITHUB_RUN_ID}" && raise_validation_error "GITHUB_RUN_ID env must be set to trace Workflow Run ID back to PR" + test -z "${GITHUB_SERVER_URL}" && raise_validation_error "GITHUB_SERVER_URL env must be set to trace Workflow Run ID back to PR" + test -z "${GITHUB_REPOSITORY}" && raise_validation_error "GITHUB_REPOSITORY env must be set to trace Workflow Run ID back to PR" + + set_environment_variables +} + +function set_environment_variables() { + WORKFLOW_URL="${GITHUB_SERVER_URL}"/"${GITHUB_REPOSITORY}"/actions/runs/"${GITHUB_RUN_ID}" # e.g., heitorlessa/aws-lambda-powertools-test/actions/runs/4913570678 + TEMP_BRANCH="${TEMP_BRANCH_PREFIX}"-"${GITHUB_RUN_ID}" # e.g., ci-changelog-4894658712 + + export readonly WORKFLOW_URL + export readonly TEMP_BRANCH +} + +function has_anything_changed() { + debug "Is there an update to the source code?" + HAS_ANY_SOURCE_CODE_CHANGED="$(git status --porcelain)" + + test -z "${HAS_ANY_SOURCE_CODE_CHANGED}" && echo "Nothing to update" && exit 0 +} + +function create_temporary_branch_with_changes() { + debug "Creating branch ${TEMP_BRANCH}" + git checkout -b "${TEMP_BRANCH}" + + debug "Committing staged files: $*" + git add "$@" + git commit -m "${COMMIT_MSG}" + + debug "Creating branch remotely" + git push origin "${TEMP_BRANCH}" +} + +function create_pr() { + debug "Creating PR against ${BRANCH} branch" + NEW_PR_URL=$(gh pr create --title "${PR_TITLE}" --body "${PR_BODY}: ${WORKFLOW_URL}" --base "${BRANCH}") # e.g, https://github.com/awslabs/aws-lambda-powertools/pull/13 + + # greedy remove any string until the last URL path, including the last '/'. https://opensource.com/article/17/6/bash-parameter-expansion + NEW_PR_ID="${NEW_PR_URL##*/}" # 13 + export NEW_PR_URL + export NEW_PR_ID +} + +function close_duplicate_prs() { + debug "Do we have any duplicate PRs?" 
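+    # List open PRs with the same title and keep every PR number except the one just created; assumes the title is unique to this automation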
+ DUPLICATE_PRS=$(gh pr list --search "${PR_TITLE}" --json number --jq ".[] | select(.number != ${NEW_PR_ID}) | .number") # e.g, 13\n14 + + debug "Closing duplicated PRs if any" + echo "${DUPLICATE_PRS}" | xargs -L1 gh pr close --delete-branch --comment "Superseded by #${NEW_PR_ID}" + export readonly DUPLICATE_PRS +} + +function report_summary() { + debug "Creating job summary" + echo "### Pull request created successfully :rocket: #${NEW_PR_URL}

Closed duplicated PRs (if any): ${DUPLICATE_PRS}" >>"$GITHUB_STEP_SUMMARY" + + notice "PR_URL is ${NEW_PR_URL}" + notice "PR_BRANCH is ${TEMP_BRANCH}" + notice "PR_DUPLICATES are ${DUPLICATE_PRS}" +} + +function main() { + # Sanity check + has_anything_changed + has_required_config + + create_temporary_branch_with_changes "$@" + create_pr + close_duplicate_prs + + report_summary +} + +main "$@" diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml index 2e038eae924..4294dda4a94 100644 --- a/.github/workflows/reusable_publish_changelog.yml +++ b/.github/workflows/reusable_publish_changelog.yml @@ -3,9 +3,6 @@ name: Build and publish latest changelog on: workflow_call: -permissions: - contents: write - env: BRANCH: develop @@ -16,6 +13,9 @@ jobs: concurrency: group: changelog-build runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write steps: - name: Checkout repository # reusable workflows start clean, so we need to checkout again uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 @@ -30,11 +30,16 @@ jobs: git pull origin "${BRANCH}" - name: "Generate latest changelog" run: make changelog - - name: Update Changelog in trunk - run: | - HAS_CHANGE=$(git status --porcelain) - test -z "${HAS_CHANGE}" && echo "Nothing to update" && exit 0 - git add CHANGELOG.md - git commit -m "update changelog with latest changes" - git pull origin "${BRANCH}" # prevents concurrent branch update failing push - git push origin HEAD:refs/heads/"${BRANCH}" + - name: Create PR + run: bash .github/scripts/create_pr_for_staged_changes.sh CHANGELOG.md + env: + COMMIT_MSG: "chore(ci): update changelog with latest changes" + PR_TITLE: "chore(ci): changelog rebuild" + TEMP_BRANCH_PREFIX: "ci-changelog" + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Cleanup orphaned branch + if: failure() + run: git push origin --delete "${TEMP_BRANCH_PREFIX}-${GITHUB_RUN_ID}" || echo "Must have failed before creating temporary branch; no cleanup needed." 
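+        # TEMP_BRANCH_PREFIX must match the Create PR step above: the script names its temporary branch ${TEMP_BRANCH_PREFIX}-${GITHUB_RUN_ID}, which is what gets deleted here on failure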
+ env: + TEMP_BRANCH_PREFIX: "ci-changelog" + GITHUB_RUN_ID: ${{ github.run_id }} From 923e3685afcf17e9caed2331c86ba8e765bbf259 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 8 May 2023 15:30:34 +0200 Subject: [PATCH 12/76] chore(ci): skip analytics on forks (#2225) --- .github/workflows/dispatch_analytics.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/dispatch_analytics.yml b/.github/workflows/dispatch_analytics.yml index b5bf7ea8ff5..0b5a19e0408 100644 --- a/.github/workflows/dispatch_analytics.yml +++ b/.github/workflows/dispatch_analytics.yml @@ -23,6 +23,7 @@ permissions: jobs: dispatch_token: + if: github.repository == 'awslabs/aws-lambda-powertools-python' concurrency: group: analytics runs-on: ubuntu-latest From 30ef99f6b0ee470a0f0fd97f8508e50c72f04350 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 22:30:49 +0100 Subject: [PATCH 13/76] chore(deps): bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 (#2227) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/secure_workflows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/secure_workflows.yml b/.github/workflows/secure_workflows.yml index dc7f766b29a..b8ad121169c 100644 --- a/.github/workflows/secure_workflows.yml +++ b/.github/workflows/secure_workflows.yml @@ -16,4 +16,4 @@ jobs: - name: Checkout code uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Ensure 3rd party workflows have SHA pinned - uses: zgosalvez/github-actions-ensure-sha-pinned-actions@21991cec25093947ff3f62e4c223df0260c39944 # v2.1.2 + uses: zgosalvez/github-actions-ensure-sha-pinned-actions@555a30da2656b4a7cf47b107800bef097723363e # v2.1.3 From a23d997539063e24bba1c37b92f5d93ed5ba1a02 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 22:31:10 +0100 Subject: [PATCH 14/76] chore(deps-dev): bump cfn-lint from 0.77.4 to 0.77.5 (#2228) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index ee953be2792..126a5edc47a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -370,14 +370,14 @@ files = [ [[package]] name = "cfn-lint" -version = "0.77.4" +version = "0.77.5" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.77.4.tar.gz", hash = "sha256:0aa67e28c992b84ad286539de59a9185f51d721d54ad539f6afe1b477836d8cd"}, - {file = "cfn_lint-0.77.4-py3-none-any.whl", hash = "sha256:b348589be12c12dc5ab4ba801fb430f441bffe76e5ffdf907088abcbeb74271d"}, + {file = "cfn-lint-0.77.5.tar.gz", hash = "sha256:4282d13ffe76a5dee6431b1f56e3641d87c28b1ef5be663afe7d8dbf13f28bdb"}, + {file = "cfn_lint-0.77.5-py3-none-any.whl", hash = "sha256:b5126dffb834078a71341090d49669046076c09196f0d2bdca68dbace1bf357a"}, ] [package.dependencies] @@ -3035,4 +3035,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "72cae7a8316616aa14416d3fdee9a39e6822d161d508ed869da0b345de388402" +content-hash = "7ad4416d3f6cb7cf6dd19dfd270278f12e30330b9b23127d56f9f3f8a33a5623" diff --git a/pyproject.toml 
b/pyproject.toml index 700c2afc0e5..fc556cd2518 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,7 +101,7 @@ all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] aws-sdk = ["boto3"] [tool.poetry.group.dev.dependencies] -cfn-lint = "0.77.4" +cfn-lint = "0.77.5" mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" From d1e11b7f837e2e680833a6faa1e8ed75ae39988e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 May 2023 08:45:54 +0200 Subject: [PATCH 15/76] chore(deps-dev): bump mkdocs-material from 9.1.9 to 9.1.11 (#2229) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 126a5edc47a..0be463c3c39 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1574,14 +1574,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.9" +version = "9.1.11" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.9-py3-none-any.whl", hash = "sha256:7db24261cb17400e132c46d17eea712bfe71056d892a9beba32cf68210297141"}, - {file = "mkdocs_material-9.1.9.tar.gz", hash = "sha256:74d8da1371ab3a326868fe47bae3cbc4aa22e93c048b4ca5117e6817b88bd734"}, + {file = "mkdocs_material-9.1.11-py3-none-any.whl", hash = "sha256:fbc86d50ec2cf34d40d5c4365780f290ceedde23f1a0704323b34e7f16b0c0dd"}, + {file = "mkdocs_material-9.1.11.tar.gz", hash = "sha256:f5d473eb79d6640a5e668d4b2ab5b9de5e76ae0a0e2d864112df0cfe9016dc1d"}, ] [package.dependencies] @@ -3035,4 +3035,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "7ad4416d3f6cb7cf6dd19dfd270278f12e30330b9b23127d56f9f3f8a33a5623" +content-hash = "0c140ce333e0131b6cf5fee17b8cba631dfd3bbd3ee5f8ab66175bfeed493842" diff --git a/pyproject.toml b/pyproject.toml index fc556cd2518..f2613d3c36d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" types-requests = "^2.30.0" typing-extensions = "^4.4.0" -mkdocs-material = "^9.1.9" +mkdocs-material = "^9.1.11" filelock = "^3.12.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.70" From 18751e63464cd2b3caf1d453ecabee38dc65e88b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 9 May 2023 09:15:47 +0100 Subject: [PATCH 16/76] chore(ci): changelog rebuild (#2230) chore(ci): update changelog with latest changes Co-authored-by: Release bot --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 23bb8186ed5..c1f4dba30a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,8 +7,13 @@ ## Maintenance * **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) +* **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) +* **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) * **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) * **ci:** enforce zero trust for third party workflows 
([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) +* **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) +* **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) +* **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) * **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) From cf548749cae5d7698f903b35671b3b073be280c9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 10 May 2023 09:23:28 +0100 Subject: [PATCH 17/76] chore(ci): changelog rebuild (#2232) Co-authored-by: Release bot --- CHANGELOG.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c1f4dba30a8..bb9816fb9ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,16 +7,17 @@ ## Maintenance * **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) +* **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) +* **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) * **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) * **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) -* **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) -* **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) +* **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) * **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) -* **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) -* **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) -* **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) +* **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) +* **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 
([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) +* **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) * **deps-dev:** bump aws-cdk from 2.77.0 to 2.78.0 ([#2202](https://github.com/awslabs/aws-lambda-powertools-python/issues/2202)) From 209816fd23f3af1d2f8ca04da54f3eaef7f1b48e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 May 2023 22:09:28 +0100 Subject: [PATCH 18/76] chore(deps-dev): bump types-python-dateutil from 2.8.19.12 to 2.8.19.13 (#2234) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0be463c3c39..200ebf7d691 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2786,14 +2786,14 @@ test = ["mypy", "pytest", "typing-extensions"] [[package]] name = "types-python-dateutil" -version = "2.8.19.12" +version = "2.8.19.13" description = "Typing stubs for python-dateutil" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-python-dateutil-2.8.19.12.tar.gz", hash = "sha256:355b2cb82b31e556fd18e7b074de7c350c680ab80608f0cc55ba6770d986d67d"}, - {file = "types_python_dateutil-2.8.19.12-py3-none-any.whl", hash = "sha256:fe5b545e678ec13e3ddc83a0eee1545c1b5e2fba4cfc39b276ab6f4e7604a923"}, + {file = "types-python-dateutil-2.8.19.13.tar.gz", hash = "sha256:09a0275f95ee31ce68196710ed2c3d1b9dc42e0b61cc43acc369a42cb939134f"}, + {file = "types_python_dateutil-2.8.19.13-py3-none-any.whl", hash = "sha256:0b0e7c68e7043b0354b26a1e0225cb1baea7abb1b324d02b50e2d08f1221043f"}, ] [[package]] From e826661502d18bca988708f7e0eefe731ca7f98a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 11 May 2023 10:45:12 +0100 Subject: [PATCH 19/76] chore(ci): changelog rebuild (#2236) chore(ci): update changelog with latest changes Co-authored-by: Release bot --- CHANGELOG.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb9816fb9ca..a63192360e9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,15 +7,17 @@ ## Maintenance * **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) +* **ci:** changelog rebuild ([#2232](https://github.com/awslabs/aws-lambda-powertools-python/issues/2232)) +* **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) * **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) * **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) * **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) * **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) -* **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) * **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) -* **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 
([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) -* **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) +* **deps-dev:** bump types-python-dateutil from 2.8.19.12 to 2.8.19.13 ([#2234](https://github.com/awslabs/aws-lambda-powertools-python/issues/2234)) * **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) +* **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) +* **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) * **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) * **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) * **deps-dev:** bump aws-cdk from 2.77.0 to 2.78.0 ([#2202](https://github.com/awslabs/aws-lambda-powertools-python/issues/2202)) From f4821edc9f4ba0c9ede64fbb296f6a19ea3f16b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 May 2023 11:53:20 +0200 Subject: [PATCH 20/76] chore(deps-dev): bump mypy from 1.2.0 to 1.3.0 (#2233) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 54 ++++++++++++++++++++++++++--------------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/poetry.lock b/poetry.lock index 200ebf7d691..548fb79f616 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1627,38 +1627,38 @@ tests = ["pytest (>=4.6)"] [[package]] name = "mypy" -version = "1.2.0" +version = "1.3.0" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"}, - {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"}, - {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"}, - {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"}, - {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"}, - {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"}, - {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"}, - {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"}, - {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"}, - {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"}, - {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"}, - {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"}, - {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"}, - {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"}, - {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"}, - {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"}, - {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"}, - {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"}, - {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"}, - {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"}, - {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"}, - {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"}, - {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"}, - {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"}, - {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"}, - {file = "mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"}, + {file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"}, + {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"}, + {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"}, + {file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"}, + {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"}, + {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"}, + {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"}, + {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", 
hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"}, + {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"}, + {file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"}, + {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"}, + {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"}, + {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"}, + {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"}, + {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"}, + {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"}, + {file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"}, + {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"}, + {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"}, + {file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"}, ] [package.dependencies] From 89df51e934b666359d627b926abd126dee0bd93e Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Thu, 11 May 2023 13:21:14 +0200 Subject: [PATCH 21/76] docs(batch_processing): snippets split, improved, and lint (#2231) Co-authored-by: Leandro Damascena --- docs/utilities/batch.md | 1006 ++--------------- .../sam/dynamodb_batch_processing.yaml | 66 ++ .../sam/kinesis_batch_processing.yaml | 53 + .../sam/sqs_batch_processing.yaml | 42 + .../src/advanced_accessing_lambda_context.py | 9 +- ...nced_accessing_lambda_context_decorator.py | 28 + ...vanced_accessing_lambda_context_manager.py | 27 + .../src/context_manager_access.py | 39 + .../src/custom_partial_processor.py | 76 ++ .../batch_processing/src/disable_tracing.py | 28 + .../batch_processing/src/extending_failure.py | 38 + .../src/getting_started_dynamodb.py | 10 +- ...etting_started_dynamodb_context_manager.py | 33 + .../src/getting_started_dynamodb_decorator.py | 33 + .../src/getting_started_dynamodb_event.json | 51 + .../getting_started_dynamodb_response.json | 7 + .../src/getting_started_kinesis.py | 1 - ...getting_started_kinesis_context_manager.py | 28 + .../src/getting_started_kinesis_decorator.py | 28 + 
.../src/getting_started_kinesis_event.json | 36 + .../src/getting_started_kinesis_response.json | 7 + .../src/getting_started_sqs.py | 1 - .../getting_started_sqs_context_manager.py | 29 + .../src/getting_started_sqs_decorator.py | 29 + .../src/getting_started_sqs_event.json | 36 + .../src/getting_started_sqs_fifo.py | 7 +- ...etting_started_sqs_fifo_context_manager.py | 7 +- .../src/getting_started_sqs_fifo_decorator.py | 7 +- .../src/getting_started_sqs_response.json | 7 + .../src/getting_started_with_test.py | 45 + .../src/getting_started_with_test_app.py | 28 + .../batch_processing/src/pydantic_dynamodb.py | 58 + .../src/pydantic_dynamodb_event.json | 50 + .../batch_processing/src/pydantic_kinesis.py | 42 + .../src/pydantic_kinesis_event.json | 36 + examples/batch_processing/src/pydantic_sqs.py | 35 + .../src/pydantic_sqs_event.json | 36 + .../src/sentry_error_tracking.py | 9 + poetry.lock | 48 +- pyproject.toml | 1 + 40 files changed, 1212 insertions(+), 945 deletions(-) create mode 100644 examples/batch_processing/sam/dynamodb_batch_processing.yaml create mode 100644 examples/batch_processing/sam/kinesis_batch_processing.yaml create mode 100644 examples/batch_processing/sam/sqs_batch_processing.yaml create mode 100644 examples/batch_processing/src/advanced_accessing_lambda_context_decorator.py create mode 100644 examples/batch_processing/src/advanced_accessing_lambda_context_manager.py create mode 100644 examples/batch_processing/src/context_manager_access.py create mode 100644 examples/batch_processing/src/custom_partial_processor.py create mode 100644 examples/batch_processing/src/disable_tracing.py create mode 100644 examples/batch_processing/src/extending_failure.py create mode 100644 examples/batch_processing/src/getting_started_dynamodb_context_manager.py create mode 100644 examples/batch_processing/src/getting_started_dynamodb_decorator.py create mode 100644 examples/batch_processing/src/getting_started_dynamodb_event.json create mode 100644 examples/batch_processing/src/getting_started_dynamodb_response.json create mode 100644 examples/batch_processing/src/getting_started_kinesis_context_manager.py create mode 100644 examples/batch_processing/src/getting_started_kinesis_decorator.py create mode 100644 examples/batch_processing/src/getting_started_kinesis_event.json create mode 100644 examples/batch_processing/src/getting_started_kinesis_response.json create mode 100644 examples/batch_processing/src/getting_started_sqs_context_manager.py create mode 100644 examples/batch_processing/src/getting_started_sqs_decorator.py create mode 100644 examples/batch_processing/src/getting_started_sqs_event.json create mode 100644 examples/batch_processing/src/getting_started_sqs_response.json create mode 100644 examples/batch_processing/src/getting_started_with_test.py create mode 100644 examples/batch_processing/src/getting_started_with_test_app.py create mode 100644 examples/batch_processing/src/pydantic_dynamodb.py create mode 100644 examples/batch_processing/src/pydantic_dynamodb_event.json create mode 100644 examples/batch_processing/src/pydantic_kinesis.py create mode 100644 examples/batch_processing/src/pydantic_kinesis_event.json create mode 100644 examples/batch_processing/src/pydantic_sqs.py create mode 100644 examples/batch_processing/src/pydantic_sqs_event.json create mode 100644 examples/batch_processing/src/sentry_error_tracking.py diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md index c4d7dc26e6c..993f343becd 100644 --- a/docs/utilities/batch.md +++ 
b/docs/utilities/batch.md
@@ -21,7 +21,7 @@ If your function fails to process any message from the batch, the entire batch r
 With this utility, batch records are processed individually – only messages that failed to be processed return to the queue or stream for a further retry. This works when two mechanisms are in place:

 1. `ReportBatchItemFailures` is set in your SQS, Kinesis, or DynamoDB event source properties
-2. [A specific response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#sqs-batchfailurereporting-syntax){target="_blank"} is returned so Lambda knows which records should not be deleted during partial responses
+2. [A specific response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting){target="_blank"} is returned so Lambda knows which records should not be deleted during partial responses

@@ -32,7 +32,7 @@

 ## Getting started

-Regardless whether you're using SQS, Kinesis Data Streams or DynamoDB Streams, you must configure your Lambda function event source to use ``ReportBatchItemFailures`.
+Regardless of whether you're using SQS, Kinesis Data Streams, or DynamoDB Streams, you must configure your Lambda function event source to use `ReportBatchItemFailures`.

 You do not need any additional IAM permissions to use this utility, except for what each event source requires.

@@ -42,179 +42,20 @@ The remaining sections of the documentation will rely on these samples. For comp

 === "SQS"

-    ```yaml title="template.yaml" hl_lines="31-32"
-    AWSTemplateFormatVersion: '2010-09-09'
-    Transform: AWS::Serverless-2016-10-31
-    Description: partial batch response sample
-
-    Globals:
-      Function:
-        Timeout: 5
-        MemorySize: 256
-        Runtime: python3.9
-        Tracing: Active
-        Environment:
-          Variables:
-            LOG_LEVEL: INFO
-            POWERTOOLS_SERVICE_NAME: hello
-
-    Resources:
-      HelloWorldFunction:
-        Type: AWS::Serverless::Function
-        Properties:
-          Handler: app.lambda_handler
-          CodeUri: hello_world
-          Policies:
-            - SQSPollerPolicy:
-                QueueName: !GetAtt SampleQueue.QueueName
-          Events:
-            Batch:
-              Type: SQS
-              Properties:
-                Queue: !GetAtt SampleQueue.Arn
-                FunctionResponseTypes:
-                  - ReportBatchItemFailures
-
-      SampleDLQ:
-        Type: AWS::SQS::Queue
-
-      SampleQueue:
-        Type: AWS::SQS::Queue
-        Properties:
-          VisibilityTimeout: 30 # Fn timeout * 6
-          RedrivePolicy:
-            maxReceiveCount: 2
-            deadLetterTargetArn: !GetAtt SampleDLQ.Arn
+    ```yaml title="template.yaml" hl_lines="30-31"
+    --8<-- "examples/batch_processing/sam/sqs_batch_processing.yaml"
     ```

 === "Kinesis Data Streams"

     ```yaml title="template.yaml" hl_lines="44-45"
-    AWSTemplateFormatVersion: '2010-09-09'
-    Transform: AWS::Serverless-2016-10-31
-    Description: partial batch response sample
-
-    Globals:
-      Function:
-        Timeout: 5
-        MemorySize: 256
-        Runtime: python3.9
-        Tracing: Active
-        Environment:
-          Variables:
-            LOG_LEVEL: INFO
-            POWERTOOLS_SERVICE_NAME: hello
-
-    Resources:
-      HelloWorldFunction:
-        Type: AWS::Serverless::Function
-        Properties:
-          Handler: app.lambda_handler
-          CodeUri: hello_world
-          Policies:
-            # Lambda Destinations require additional permissions
-            # to send failure records to DLQ from Kinesis/DynamoDB
-            - Version: "2012-10-17"
-              Statement:
-                Effect: "Allow"
-                Action:
-                  - sqs:GetQueueAttributes
-                  - sqs:GetQueueUrl
-                  - sqs:SendMessage
-                Resource: !GetAtt SampleDLQ.Arn
-          Events:
-            KinesisStream:
-              Type: Kinesis
-              Properties:
-                Stream: !GetAtt SampleStream.Arn
-                BatchSize: 100
-                StartingPosition: LATEST
-                MaximumRetryAttempts: 2
-
DestinationConfig: - OnFailure: - Destination: !GetAtt SampleDLQ.Arn - FunctionResponseTypes: - - ReportBatchItemFailures - - SampleDLQ: - Type: AWS::SQS::Queue - - SampleStream: - Type: AWS::Kinesis::Stream - Properties: - ShardCount: 1 + --8<-- "examples/batch_processing/sam/kinesis_batch_processing.yaml" ``` === "DynamoDB Streams" ```yaml title="template.yaml" hl_lines="43-44" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: partial batch response sample - - Globals: - Function: - Timeout: 5 - MemorySize: 256 - Runtime: python3.9 - Tracing: Active - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_SERVICE_NAME: hello - - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: hello_world - Policies: - # Lambda Destinations require additional permissions - # to send failure records from Kinesis/DynamoDB - - Version: "2012-10-17" - Statement: - Effect: "Allow" - Action: - - sqs:GetQueueAttributes - - sqs:GetQueueUrl - - sqs:SendMessage - Resource: !GetAtt SampleDLQ.Arn - Events: - DynamoDBStream: - Type: DynamoDB - Properties: - Stream: !GetAtt SampleTable.StreamArn - StartingPosition: LATEST - MaximumRetryAttempts: 2 - DestinationConfig: - OnFailure: - Destination: !GetAtt SampleDLQ.Arn - FunctionResponseTypes: - - ReportBatchItemFailures - - SampleDLQ: - Type: AWS::SQS::Queue - - SampleTable: - Type: AWS::DynamoDB::Table - Properties: - BillingMode: PAY_PER_REQUEST - AttributeDefinitions: - - AttributeName: pk - AttributeType: S - - AttributeName: sk - AttributeType: S - KeySchema: - - AttributeName: pk - KeyType: HASH - - AttributeName: sk - KeyType: RANGE - SSESpecification: - SSEEnabled: yes - StreamSpecification: - StreamViewType: NEW_AND_OLD_IMAGES - + --8<-- "examples/batch_processing/sam/dynamodb_batch_processing.yaml" ``` ### Processing messages from SQS @@ -230,126 +71,34 @@ Processing batches from SQS works in three stages: === "Recommended" - ```python hl_lines="4 9 12 18 29" + ```python hl_lines="4-9 12 18 28" --8<-- "examples/batch_processing/src/getting_started_sqs.py" ``` === "As a context manager" - ```python hl_lines="4-5 9 15 24-26 28" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages = processor.process() # kick off processing, return list[tuple] - - return processor.response() + ```python hl_lines="4-5 8 14 25-26 29" + --8<-- "examples/batch_processing/src/getting_started_sqs_context_manager.py" ``` === "As a decorator (legacy)" - ```python hl_lines="4-5 9 15 23 25" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="4-9 12 18 27 29" + --8<-- "examples/batch_processing/src/getting_started_sqs_decorator.py" ``` === "Sample response" The second record failed to be processed, therefore the processor added its message ID in the response. - ```python - { - 'batchItemFailures': [ - { - 'itemIdentifier': '244fc6b4-87a3-44ab-83d2-361172410c3a' - } - ] - } + ```json + --8<-- "examples/batch_processing/src/getting_started_sqs_response.json" ``` === "Sample event" ```json - { - "Records": [ - { - "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", - "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", - "body": "{\"Message\": \"success\"}", - "attributes": { - "ApproximateReceiveCount": "1", - "SentTimestamp": "1545082649183", - "SenderId": "AIDAIENQZJOLO23YVJ4VO", - "ApproximateFirstReceiveTimestamp": "1545082649185" - }, - "messageAttributes": {}, - "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", - "eventSource": "aws:sqs", - "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", - "awsRegion": "us-east-1" - }, - { - "messageId": "244fc6b4-87a3-44ab-83d2-361172410c3a", - "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", - "body": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0Lg==", - "attributes": { - "ApproximateReceiveCount": "1", - "SentTimestamp": "1545082649183", - "SenderId": "AIDAIENQZJOLO23YVJ4VO", - "ApproximateFirstReceiveTimestamp": "1545082649185" - }, - "messageAttributes": {}, - "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", - "eventSource": "aws:sqs", - "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", - "awsRegion": "us-east-1" - } - ] - } + --8<-- "examples/batch_processing/src/getting_started_sqs_event.json" ``` #### FIFO queues @@ -359,19 +108,19 @@ This helps preserve the ordering of messages in your queue. 
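For orientation before the snippet-based tabs below, a minimal sketch of this FIFO flow could look as follows. This is a sketch only: it assumes the utility's `SqsFifoPartialProcessor`, which short-circuits at the first failure so queue ordering is preserved.

```python
import json

from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.utilities.batch import (
    SqsFifoPartialProcessor,
    process_partial_response,
)
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
from aws_lambda_powertools.utilities.typing import LambdaContext

# FIFO-aware processor: after the first failed record, the remaining
# records are reported back as failures too, preserving ordering
processor = SqsFifoPartialProcessor()
tracer = Tracer()
logger = Logger()


@tracer.capture_method
def record_handler(record: SQSRecord):
    payload: str = record.body
    if payload:
        item: dict = json.loads(payload)
        logger.info(item)


@logger.inject_lambda_context
@tracer.capture_lambda_handler
def lambda_handler(event, context: LambdaContext):
    return process_partial_response(
        event=event,
        record_handler=record_handler,
        processor=processor,
        context=context,
    )
```

The tabs that follow reference the repository's own snippets for the same pattern.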
=== "Recommended" - ```python hl_lines="3 9" + ```python hl_lines="5-6 11 27" --8<-- "examples/batch_processing/src/getting_started_sqs_fifo.py" ``` === "As a context manager" - ```python hl_lines="2 6" + ```python hl_lines="4 8" --8<-- "examples/batch_processing/src/getting_started_sqs_fifo_context_manager.py" ``` === "As a decorator (legacy)" - ```python hl_lines="3 9" + ```python hl_lines="5-6 11 26" --8<-- "examples/batch_processing/src/getting_started_sqs_fifo_decorator.py" ``` @@ -388,122 +137,34 @@ Processing batches from Kinesis works in three stages: === "Recommended" - ```python hl_lines="2 7 12 18 28" + ```python hl_lines="2-9 12 18 27" --8<-- "examples/batch_processing/src/getting_started_kinesis.py" ``` === "As a context manager" - ```python hl_lines="4-5 9 15 23-25 27" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType - from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.KinesisDataStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: KinesisStreamRecord): - logger.info(record.kinesis.data_as_text) - payload: dict = record.kinesis.data_as_json() - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages = processor.process() # kick off processing, return list[tuple] - - return processor.response() + ```python hl_lines="3-5 8 14 23-25 28" + --8<-- "examples/batch_processing/src/getting_started_kinesis_context_manager.py" ``` === "As a decorator (legacy)" - ```python hl_lines="2-3 7 20 22" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.KinesisDataStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: KinesisStreamRecord): - logger.info(record.kinesis.data_as_text) - payload: dict = record.kinesis.data_as_json() - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="2-9 12 18 26" + --8<-- "examples/batch_processing/src/getting_started_kinesis_decorator.py" ``` === "Sample response" The second record failed to be processed, therefore the processor added its sequence number in the response. 
- ```python - { - 'batchItemFailures': [ - { - 'itemIdentifier': '6006958808509702859251049540584488075644979031228738' - } - ] - } + ```json + --8<-- "examples/batch_processing/src/getting_started_kinesis_response.json" ``` === "Sample event" ```json - { - "Records": [ - { - "kinesis": { - "kinesisSchemaVersion": "1.0", - "partitionKey": "1", - "sequenceNumber": "4107859083838847772757075850904226111829882106684065", - "data": "eyJNZXNzYWdlIjogInN1Y2Nlc3MifQ==", - "approximateArrivalTimestamp": 1545084650.987 - }, - "eventSource": "aws:kinesis", - "eventVersion": "1.0", - "eventID": "shardId-000000000006:4107859083838847772757075850904226111829882106684065", - "eventName": "aws:kinesis:record", - "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", - "awsRegion": "us-east-2", - "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" - }, - { - "kinesis": { - "kinesisSchemaVersion": "1.0", - "partitionKey": "1", - "sequenceNumber": "6006958808509702859251049540584488075644979031228738", - "data": "c3VjY2Vzcw==", - "approximateArrivalTimestamp": 1545084650.987 - }, - "eventSource": "aws:kinesis", - "eventVersion": "1.0", - "eventID": "shardId-000000000006:6006958808509702859251049540584488075644979031228738", - "eventName": "aws:kinesis:record", - "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", - "awsRegion": "us-east-2", - "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" - } - ] - } + --8<-- "examples/batch_processing/src/getting_started_kinesis_event.json" ``` ### Processing messages from DynamoDB @@ -519,138 +180,34 @@ Processing batches from DynamoDB Streams works in three stages: === "Recommended" - ```python hl_lines="4 9 14 20 30" + ```python hl_lines="4-11 14 20 32" --8<-- "examples/batch_processing/src/getting_started_dynamodb.py" ``` === "As a context manager" - ```python hl_lines="4-5 9 15 23-27" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.DynamoDBStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: DynamoDBRecord): - logger.info(record.dynamodb.new_image) - payload: dict = json.loads(record.dynamodb.new_image.get("Message")) - ... 
- - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages = processor.process() # kick off processing, return list[tuple] - - return processor.response() + ```python hl_lines="5-7 10 16 28-30 33" + --8<-- "examples/batch_processing/src/getting_started_dynamodb_context_manager.py" ``` === "As a decorator (legacy)" - ```python hl_lines="4-5 9 15 22 24" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.DynamoDBStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: DynamoDBRecord): - logger.info(record.dynamodb.new_image) - payload: dict = json.loads(record.dynamodb.new_image.get("Message")) - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="4-11 14 20 31" + --8<-- "examples/batch_processing/src/getting_started_dynamodb_decorator.py" ``` === "Sample response" The second record failed to be processed, therefore the processor added its sequence number in the response. - ```python - { - 'batchItemFailures': [ - { - 'itemIdentifier': '8640712661' - } - ] - } + ```json + --8<-- "examples/batch_processing/src/getting_started_dynamodb_response.json" ``` === "Sample event" ```json - { - "Records": [ - { - "eventID": "1", - "eventVersion": "1.0", - "dynamodb": { - "Keys": { - "Id": { - "N": "101" - } - }, - "NewImage": { - "Message": { - "S": "failure" - } - }, - "StreamViewType": "NEW_AND_OLD_IMAGES", - "SequenceNumber": "3275880929", - "SizeBytes": 26 - }, - "awsRegion": "us-west-2", - "eventName": "INSERT", - "eventSourceARN": "eventsource_arn", - "eventSource": "aws:dynamodb" - }, - { - "eventID": "1", - "eventVersion": "1.0", - "dynamodb": { - "Keys": { - "Id": { - "N": "101" - } - }, - "NewImage": { - "SomethingElse": { - "S": "success" - } - }, - "StreamViewType": "NEW_AND_OLD_IMAGES", - "SequenceNumber": "8640712661", - "SizeBytes": 26 - }, - "awsRegion": "us-west-2", - "eventName": "INSERT", - "eventSourceARN": "eventsource_arn", - "eventSource": "aws:dynamodb" - } - ] - } + --8<-- "examples/batch_processing/src/getting_started_dynamodb_event.json" ``` ### Partial failure mechanics @@ -674,14 +231,14 @@ You can use `AsyncBatchProcessor` class and `async_process_partial_response` fun The reason this is not the default behaviour is that not all use cases can handle concurrency safely (e.g., loyalty points must be updated in order). -```python hl_lines="3 11 14 24" title="High-concurrency with AsyncBatchProcessor" +```python hl_lines="3 11 14 24-26" title="High-concurrency with AsyncBatchProcessor" --8<-- "examples/batch_processing/src/getting_started_async.py" ``` ???+ warning "Using tracer?" `AsyncBatchProcessor` uses `asyncio.gather` which can cause side effects and reach trace limits at high concurrency. - See [Tracing concurrent asynchronous functions](../core/tracer.md#concurrent-asynchronous-functions). 
+ See [Tracing concurrent asynchronous functions](../core/tracer.md#concurrent-asynchronous-functions){target="_blank"}. ## Advanced @@ -693,130 +250,38 @@ Inheritance is importance because we need to access message IDs and sequence num === "SQS" - ```python hl_lines="5 14 23 29" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, process_partial_response - from aws_lambda_powertools.utilities.parser.models import SqsRecordModel - from aws_lambda_powertools.utilities.typing import LambdaContext - from aws_lambda_powertools.utilities.parser import BaseModel - from aws_lambda_powertools.utilities.parser.types import Json - - - class Order(BaseModel): - item: dict - - class OrderSqsRecord(SqsRecordModel): - body: Json[Order] # deserialize order data from JSON string - - processor = BatchProcessor(event_type=EventType.SQS, model=OrderSqsRecord) - tracer = Tracer() - logger = Logger() - + ```python hl_lines="8 17 27 35" + --8<-- "examples/batch_processing/src/pydantic_sqs.py" + ``` - @tracer.capture_method - def record_handler(record: OrderSqsRecord): - return record.body.item +=== "SQS - Sample Event " - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) + ```json hl_lines="6 22" + --8<-- "examples/batch_processing/src/pydantic_sqs_event.json" ``` === "Kinesis Data Streams" - ```python hl_lines="5 15 19 23 29 36" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, process_partial_response - from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecordPayload, KinesisDataStreamRecord - from aws_lambda_powertools.utilities.parser import BaseModel, validator - from aws_lambda_powertools.utilities.parser.types import Json - from aws_lambda_powertools.utilities.typing import LambdaContext - - - class Order(BaseModel): - item: dict - - - class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload): - data: Json[Order] - - - class OrderKinesisRecord(KinesisDataStreamRecord): - kinesis: OrderKinesisPayloadRecord - - - processor = BatchProcessor(event_type=EventType.KinesisDataStreams, model=OrderKinesisRecord) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: OrderKinesisRecord): - return record.kinesis.data.item + ```python hl_lines="9 10 20 28 34 42" + --8<-- "examples/batch_processing/src/pydantic_kinesis.py" + ``` +=== "Kinesis - Sample Event " - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) + ```json hl_lines="8 24" + --8<-- "examples/batch_processing/src/pydantic_kinesis_event.json" ``` === "DynamoDB Streams" - ```python hl_lines="7 16 26 31 35 41" - import json - - from typing import Dict, Literal, Optional - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, process_partial_response - from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamChangedRecordModel, DynamoDBStreamRecordModel - from aws_lambda_powertools.utilities.typing import LambdaContext - from 
aws_lambda_powertools.utilities.parser import BaseModel, validator - - - class Order(BaseModel): - item: dict - - - class OrderDynamoDB(BaseModel): - Message: Order - - # auto transform json string - # so Pydantic can auto-initialize nested Order model - @validator("Message", pre=True) - def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): - return json.loads(value["S"]) - - - class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): - NewImage: Optional[OrderDynamoDB] - OldImage: Optional[OrderDynamoDB] - - - class OrderDynamoDBRecord(DynamoDBStreamRecordModel): - dynamodb: OrderDynamoDBChangeRecord - - - processor = BatchProcessor(event_type=EventType.DynamoDBStreams, model=OrderDynamoDBRecord) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: OrderDynamoDBRecord): - return record.dynamodb.NewImage.Message.item + ```python hl_lines="14-15 24 34 39 43 49 58" + --8<-- "examples/batch_processing/src/pydantic_dynamodb.py" + ``` +=== "DynamoDB - Sample Event " - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) + ```json hl_lines="13-15 36-38" + --8<-- "examples/batch_processing/src/pydantic_dynamodb_event.json" ``` ### Accessing processed messages @@ -826,46 +291,8 @@ Use the context manager to access a list of all returned values from your `recor * **When successful**. We will include a tuple with `success`, the result of `record_handler`, and the batch record * **When failed**. We will include a tuple with `fail`, exception as a string, and the batch record -```python hl_lines="30-36" title="Accessing processed messages via context manager" -import json - -from typing import Any, List, Literal, Union - -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.utilities.batch import (BatchProcessor, - EventType, - FailureResponse, - SuccessResponse) -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord -from aws_lambda_powertools.utilities.typing import LambdaContext - - -processor = BatchProcessor(event_type=EventType.SQS) -tracer = Tracer() -logger = Logger() - - -@tracer.capture_method -def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- -@logger.inject_lambda_context -@tracer.capture_lambda_handler -def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages: List[Union[SuccessResponse, FailureResponse]] = processor.process() - - for message in processed_messages: - status: Union[Literal["success"], Literal["fail"]] = message[0] - result: Any = message[1] - record: SQSRecord = message[2] - - - return processor.response() +```python hl_lines="28-33" title="Accessing processed messages via context manager" +--8<-- "examples/batch_processing/src/context_manager_access.py" ``` ### Accessing Lambda Context @@ -876,69 +303,20 @@ We can automatically inject the [Lambda context](https://docs.aws.amazon.com/lam === "Recommended" - ```python hl_lines="19" + ```python hl_lines="18 27" --8<-- "examples/batch_processing/src/advanced_accessing_lambda_context.py" ``` === "As a decorator (legacy)" - ```python hl_lines="15" - from typing import Optional - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import (BatchProcessor, EventType, - batch_processor) - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord, lambda_context: Optional[LambdaContext] = None): - if lambda_context is not None: - remaining_time = lambda_context.get_remaining_time_in_millis() - ... - - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="18 26" + --8<-- "examples/batch_processing/src/advanced_accessing_lambda_context_decorator.py" ``` === "As a context manager" - ```python hl_lines="14 23" - from typing import Optional - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord, lambda_context: Optional[LambdaContext] = None): - if lambda_context is not None: - remaining_time = lambda_context.get_remaining_time_in_millis() - ... 
- - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler, lambda_context=context): - result = processor.process() - - return result + ```python hl_lines="14 24" + --8<-- "examples/batch_processing/src/advanced_accessing_lambda_context_manager.py" ``` ### Extending BatchProcessor @@ -953,107 +331,23 @@ For these scenarios, you can subclass `BatchProcessor` and quickly override `suc ???+ example Let's suppose you'd like to add a metric named `BatchRecordFailures` for each batch record that failed processing -```python title="Extending failure handling mechanism in BatchProcessor" - -from typing import Tuple - -from aws_lambda_powertools import Metrics -from aws_lambda_powertools.metrics import MetricUnit -from aws_lambda_powertools.utilities.batch import BatchProcessor, ExceptionInfo, EventType, FailureResponse, process_partial_response -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - - -class MyProcessor(BatchProcessor): - def failure_handler(self, record: SQSRecord, exception: ExceptionInfo) -> FailureResponse: - metrics.add_metric(name="BatchRecordFailures", unit=MetricUnit.Count, value=1) - return super().failure_handler(record, exception) - -processor = MyProcessor(event_type=EventType.SQS) -metrics = Metrics(namespace="test") - - -@tracer.capture_method -def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... - -@metrics.log_metrics(capture_cold_start_metric=True) -def lambda_handler(event, context: LambdaContext): - return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) +```python hl_lines="8 9 16-19 22 38" title="Extending failure handling mechanism in BatchProcessor" +--8<-- "examples/batch_processing/src/extending_failure.py" ``` ### Create your own partial processor -You can create your own partial batch processor from scratch by inheriting the `BasePartialProcessor` class, and implementing `_prepare()`, `_clean()` and `_process_record()`. +You can create your own partial batch processor from scratch by inheriting the `BasePartialProcessor` class, and implementing `_prepare()`, `_clean()`, `_process_record()` and `_async_process_record()`. * **`_process_record()`** – handles all processing logic for each individual message of a batch, including calling the `record_handler` (self.handler) * **`_prepare()`** – called once as part of the processor initialization -* **`clean()`** – teardown logic called once after `_process_record` completes +* **`_clean()`** – teardown logic called once after `_process_record` completes +* **`_async_process_record()`** – If you need to implement asynchronous logic, use this method, otherwise define it in your class with empty logic You can then use this class as a context manager, or pass it to `batch_processor` to use as a decorator on your Lambda handler function. 
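Before the full example below, a bare-bones skeleton of that interface may help. This is illustrative only: the method bodies are stubs, and it assumes `failure_handler` expects the `sys.exc_info()` tuple, mirroring the `ExceptionInfo` type used earlier on this page.

```python
import sys

from aws_lambda_powertools.utilities.batch import BasePartialProcessor


class MyBarebonesProcessor(BasePartialProcessor):
    def _prepare(self):
        # one-time setup, runs before any record is processed
        self.success_messages.clear()

    def _clean(self):
        # one-time teardown, runs after the whole batch was processed
        ...

    def _process_record(self, record: dict):
        # synchronous path: delegate to the registered record handler
        try:
            result = self.handler(record)
            return self.success_handler(record, result)
        except Exception:
            return self.failure_handler(record, sys.exc_info())

    async def _async_process_record(self, record: dict):
        # not using asynchronous logic? define it with empty logic, as noted above
        ...
```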
-```python hl_lines="3 9 24 30 37 57" title="Creating a custom batch processor" -from random import randint - -from aws_lambda_powertools.utilities.batch import BasePartialProcessor, batch_processor -import boto3 -import os - -table_name = os.getenv("TABLE_NAME", "table_not_found") - -class MyPartialProcessor(BasePartialProcessor): - """ - Process a record and stores successful results at a Amazon DynamoDB Table - - Parameters - ---------- - table_name: str - DynamoDB table name to write results to - """ - - def __init__(self, table_name: str): - self.table_name = table_name - - super().__init__() - - def _prepare(self): - # It's called once, *before* processing - # Creates table resource and clean previous results - self.ddb_table = boto3.resource("dynamodb").Table(self.table_name) - self.success_messages.clear() - - def _clean(self): - # It's called once, *after* closing processing all records (closing the context manager) - # Here we're sending, at once, all successful messages to a ddb table - with self.ddb_table.batch_writer() as batch: - for result in self.success_messages: - batch.put_item(Item=result) - - def _process_record(self, record): - # It handles how your record is processed - # Here we're keeping the status of each run - # where self.handler is the record_handler function passed as an argument - try: - result = self.handler(record) # record_handler passed to decorator/context manager - return self.success_handler(record, result) - except Exception as exc: - return self.failure_handler(record, exc) - - def success_handler(self, record): - entry = ("success", result, record) - message = {"age": result} - self.success_messages.append(message) - return entry - - -def record_handler(record): - return randint(0, 100) - -@batch_processor(record_handler=record_handler, processor=MyPartialProcessor(table_name)) -def lambda_handler(event, context): - return {"statusCode": 200} +```python hl_lines="9-12 20 35 41 48 59 64 68 76" title="Creating a custom batch processor" +--8<-- "examples/batch_processing/src/custom_partial_processor.py" ``` ### Caveats @@ -1064,33 +358,8 @@ When using Tracer to capture responses for each batch record processing, you mig If that's the case, you can configure [Tracer to disable response auto-capturing](../core/tracer.md#disabling-response-auto-capture){target="_blank"}. -```python hl_lines="14" title="Disabling Tracer response auto-capturing" -import json - -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord -from aws_lambda_powertools.utilities.typing import LambdaContext - - -processor = BatchProcessor(event_type=EventType.SQS) -tracer = Tracer() -logger = Logger() - - -@tracer.capture_method(capture_response=False) -def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- -@logger.inject_lambda_context -@tracer.capture_lambda_handler -@batch_processor(record_handler=record_handler, processor=processor) -def lambda_handler(event, context: LambdaContext): - return processor.response() - +```python hl_lines="17" title="Disabling Tracer response auto-capturing" +--8<-- "examples/batch_processing/src/disable_tracing.py" ``` ## Testing your code @@ -1101,135 +370,27 @@ As there is no external calls, you can unit test your code with `BatchProcessor` Given a SQS batch where the first batch record succeeds and the second fails processing, we should have a single item reported in the function response. -=== "test_app.py" +=== "getting_started_with_test.py" ```python - import json - - from pathlib import Path - from dataclasses import dataclass - - import pytest - from src.app import lambda_handler, processor - - - def load_event(path: Path): - with path.open() as f: - return json.load(f) - - - @pytest.fixture - def lambda_context(): - @dataclass - class LambdaContext: - function_name: str = "test" - memory_limit_in_mb: int = 128 - invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" - aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" - - return LambdaContext() - - @pytest.fixture() - def sqs_event(): - """Generates API GW Event""" - return load_event(path=Path("events/sqs_event.json")) - - - def test_app_batch_partial_response(sqs_event, lambda_context): - # GIVEN - processor = app.processor # access processor for additional assertions - successful_record = sqs_event["Records"][0] - failed_record = sqs_event["Records"][1] - expected_response = { - "batchItemFailures: [ - { - "itemIdentifier": failed_record["messageId"] - } - ] - } - - # WHEN - ret = app.lambda_handler(sqs_event, lambda_context) - - # THEN - assert ret == expected_response - assert len(processor.fail_messages) == 1 - assert processor.success_messages[0] == successful_record + --8<-- "examples/batch_processing/src/getting_started_with_test.py" ``` -=== "src/app.py" +=== "getting_started_with_test_app.py" ```python - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, process_partial_response - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) + --8<-- "examples/batch_processing/src/getting_started_with_test_app.py" ``` === "Sample SQS event" - ```json title="events/sqs_sample.json" - { - "Records": [ - { - "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", - "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", - "body": "{\"Message\": \"success\"}", - "attributes": { - "ApproximateReceiveCount": "1", - "SentTimestamp": "1545082649183", - "SenderId": "AIDAIENQZJOLO23YVJ4VO", - "ApproximateFirstReceiveTimestamp": "1545082649185" - }, - "messageAttributes": {}, - "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", - "eventSource": "aws:sqs", - "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", - "awsRegion": "us-east-1" - }, - { - "messageId": "244fc6b4-87a3-44ab-83d2-361172410c3a", - "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", - "body": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0Lg==", - "attributes": { - "ApproximateReceiveCount": "1", - "SentTimestamp": "1545082649183", - "SenderId": "AIDAIENQZJOLO23YVJ4VO", - "ApproximateFirstReceiveTimestamp": "1545082649185" - }, - "messageAttributes": {}, - "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", - "eventSource": "aws:sqs", - "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", - "awsRegion": "us-east-1" - } - ] - } + ```json title="events/sqs_event.json" + --8<-- "examples/batch_processing/src/getting_started_sqs_event.json" ``` ## FAQ -### Choosing between decorator and context manager +### Choosing between method and context manager Use context manager when you want access to the processed messages or handle `BatchProcessingError` exception when all records within the batch fail to be processed. 
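As a hedged sketch of the context manager route (assuming `BatchProcessingError` is importable from the utility's `exceptions` module):

```python
import json

from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType
from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
from aws_lambda_powertools.utilities.typing import LambdaContext

processor = BatchProcessor(event_type=EventType.SQS)


def record_handler(record: SQSRecord):
    payload: str = record.body
    if payload:
        item: dict = json.loads(payload)
        ...


def lambda_handler(event, context: LambdaContext):
    batch = event["Records"]
    try:
        with processor(records=batch, handler=record_handler):
            processed_messages = processor.process()  # tuples of (status, result, record)
        for status, result, record in processed_messages:
            ...  # inspect each outcome as needed
    except BatchProcessingError:
        # raised only when every record in the batch failed to process
        raise
    return processor.response()
```

Otherwise, a single call to `process_partial_response` keeps the handler body minimal, in line with the recommendation noted below.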
@@ -1243,17 +404,8 @@ As 2.12.0, `process_partial_response` and `async_process_partial_response` are t When using Sentry.io for error monitoring, you can override `failure_handler` to capture each processing exception with Sentry SDK: -> Credits to [Charles-Axel Dein](https://github.com/awslabs/aws-lambda-powertools-python/issues/293#issuecomment-781961732) - -```python hl_lines="4 7-8" title="Integrating error tracking with Sentry.io" -from typing import Tuple - -from aws_lambda_powertools.utilities.batch import BatchProcessor, FailureResponse -from sentry_sdk import capture_exception - +> Credits to [Charles-Axel Dein](https://github.com/awslabs/aws-lambda-powertools-python/issues/293#issuecomment-781961732){target="_blank"} -class MyProcessor(BatchProcessor): - def failure_handler(self, record, exception) -> FailureResponse: - capture_exception() # send exception to Sentry - return super().failure_handler(record, exception) +```python hl_lines="1 7-8" title="Integrating error tracking with Sentry.io" +--8<-- "examples/batch_processing/src/sentry_error_tracking.py" ``` diff --git a/examples/batch_processing/sam/dynamodb_batch_processing.yaml b/examples/batch_processing/sam/dynamodb_batch_processing.yaml new file mode 100644 index 00000000000..2ed70d65a86 --- /dev/null +++ b/examples/batch_processing/sam/dynamodb_batch_processing.yaml @@ -0,0 +1,66 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: partial batch response sample + +Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.10 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + # Lambda Destinations require additional permissions + # to send failure records from Kinesis/DynamoDB + - Version: "2012-10-17" + Statement: + Effect: "Allow" + Action: + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + Resource: !GetAtt SampleDLQ.Arn + Events: + DynamoDBStream: + Type: DynamoDB + Properties: + Stream: !GetAtt SampleTable.StreamArn + StartingPosition: LATEST + MaximumRetryAttempts: 2 + DestinationConfig: + OnFailure: + Destination: !GetAtt SampleDLQ.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleTable: + Type: AWS::DynamoDB::Table + Properties: + BillingMode: PAY_PER_REQUEST + AttributeDefinitions: + - AttributeName: pk + AttributeType: S + - AttributeName: sk + AttributeType: S + KeySchema: + - AttributeName: pk + KeyType: HASH + - AttributeName: sk + KeyType: RANGE + SSESpecification: + SSEEnabled: true + StreamSpecification: + StreamViewType: NEW_AND_OLD_IMAGES diff --git a/examples/batch_processing/sam/kinesis_batch_processing.yaml b/examples/batch_processing/sam/kinesis_batch_processing.yaml new file mode 100644 index 00000000000..28b2c58402b --- /dev/null +++ b/examples/batch_processing/sam/kinesis_batch_processing.yaml @@ -0,0 +1,53 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: partial batch response sample + +Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.10 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + # Lambda Destinations 
require additional permissions + # to send failure records to DLQ from Kinesis/DynamoDB + - Version: "2012-10-17" + Statement: + Effect: "Allow" + Action: + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + Resource: !GetAtt SampleDLQ.Arn + Events: + KinesisStream: + Type: Kinesis + Properties: + Stream: !GetAtt SampleStream.Arn + BatchSize: 100 + StartingPosition: LATEST + MaximumRetryAttempts: 2 + DestinationConfig: + OnFailure: + Destination: !GetAtt SampleDLQ.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleStream: + Type: AWS::Kinesis::Stream + Properties: + ShardCount: 1 diff --git a/examples/batch_processing/sam/sqs_batch_processing.yaml b/examples/batch_processing/sam/sqs_batch_processing.yaml new file mode 100644 index 00000000000..00bbd00e569 --- /dev/null +++ b/examples/batch_processing/sam/sqs_batch_processing.yaml @@ -0,0 +1,42 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: partial batch response sample + +Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.10 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + - SQSPollerPolicy: + QueueName: !GetAtt SampleQueue.QueueName + Events: + Batch: + Type: SQS + Properties: + Queue: !GetAtt SampleQueue.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleQueue: + Type: AWS::SQS::Queue + Properties: + VisibilityTimeout: 30 # Fn timeout * 6 + RedrivePolicy: + maxReceiveCount: 2 + deadLetterTargetArn: !GetAtt SampleDLQ.Arn diff --git a/examples/batch_processing/src/advanced_accessing_lambda_context.py b/examples/batch_processing/src/advanced_accessing_lambda_context.py index 96d95ca5445..b0e7eeb98af 100644 --- a/examples/batch_processing/src/advanced_accessing_lambda_context.py +++ b/examples/batch_processing/src/advanced_accessing_lambda_context.py @@ -1,4 +1,3 @@ -import json from typing import Optional from aws_lambda_powertools import Logger, Tracer @@ -17,11 +16,9 @@ @tracer.capture_method def record_handler(record: SQSRecord, lambda_context: Optional[LambdaContext] = None): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - logger.info(item) - ... 
+ if lambda_context is not None: + remaining_time = lambda_context.get_remaining_time_in_millis() + logger.info(remaining_time) @logger.inject_lambda_context diff --git a/examples/batch_processing/src/advanced_accessing_lambda_context_decorator.py b/examples/batch_processing/src/advanced_accessing_lambda_context_decorator.py new file mode 100644 index 00000000000..267e9ddbd62 --- /dev/null +++ b/examples/batch_processing/src/advanced_accessing_lambda_context_decorator.py @@ -0,0 +1,28 @@ +from typing import Optional + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + batch_processor, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord, lambda_context: Optional[LambdaContext] = None): + if lambda_context is not None: + remaining_time = lambda_context.get_remaining_time_in_millis() + logger.info(remaining_time) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/examples/batch_processing/src/advanced_accessing_lambda_context_manager.py b/examples/batch_processing/src/advanced_accessing_lambda_context_manager.py new file mode 100644 index 00000000000..17b719a84d4 --- /dev/null +++ b/examples/batch_processing/src/advanced_accessing_lambda_context_manager.py @@ -0,0 +1,27 @@ +from typing import Optional + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord, lambda_context: Optional[LambdaContext] = None): + if lambda_context is not None: + remaining_time = lambda_context.get_remaining_time_in_millis() + logger.info(remaining_time) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler, lambda_context=context): + result = processor.process() + + return result diff --git a/examples/batch_processing/src/context_manager_access.py b/examples/batch_processing/src/context_manager_access.py new file mode 100644 index 00000000000..9882092bd83 --- /dev/null +++ b/examples/batch_processing/src/context_manager_access.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +import json +from typing import List, Tuple + +from typing_extensions import Literal + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) 
+ + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages: List[Tuple] = processor.process() + + for message in processed_messages: + status: Literal["success"] | Literal["fail"] = message[0] + record: SQSRecord = message[2] + + logger.info(status, record=record) + + return processor.response() diff --git a/examples/batch_processing/src/custom_partial_processor.py b/examples/batch_processing/src/custom_partial_processor.py new file mode 100644 index 00000000000..353f612e7cc --- /dev/null +++ b/examples/batch_processing/src/custom_partial_processor.py @@ -0,0 +1,76 @@ +import os +import sys +from random import randint +from typing import Any + +import boto3 + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.batch import ( + BasePartialBatchProcessor, + EventType, + process_partial_response, +) + +table_name = os.getenv("TABLE_NAME", "table_not_found") + +logger = Logger() + + +class MyPartialProcessor(BasePartialBatchProcessor): + """ + Process a record and stores successful results at a Amazon DynamoDB Table + + Parameters + ---------- + table_name: str + DynamoDB table name to write results to + """ + + def __init__(self, table_name: str): + self.table_name = table_name + + super().__init__(event_type=EventType.SQS) + + def _prepare(self): + # It's called once, *before* processing + # Creates table resource and clean previous results + self.ddb_table = boto3.resource("dynamodb").Table(self.table_name) + self.success_messages.clear() + + def _clean(self): + # It's called once, *after* closing processing all records (closing the context manager) + # Here we're sending, at once, all successful messages to a ddb table + with self.ddb_table.batch_writer() as batch: + for result in self.success_messages: + batch.put_item(Item=result) + + def _process_record(self, record): + # It handles how your record is processed + # Here we're keeping the status of each run + # where self.handler is the record_handler function passed as an argument + try: + result = self.handler(record) # record_handler passed to decorator/context manager + return self.success_handler(record, result) + except Exception as exc: + logger.error(exc) + return self.failure_handler(record, sys.exc_info()) + + def success_handler(self, record, result: Any): + entry = ("success", result, record) + self.success_messages.append(record) + return entry + + async def _async_process_record(self, record: dict): + raise NotImplementedError() + + +processor = MyPartialProcessor(table_name) + + +def record_handler(record): + return randint(0, 100) + + +def lambda_handler(event, context): + return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) diff --git a/examples/batch_processing/src/disable_tracing.py b/examples/batch_processing/src/disable_tracing.py new file mode 100644 index 00000000000..c8967044f74 --- /dev/null +++ b/examples/batch_processing/src/disable_tracing.py @@ -0,0 +1,28 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + process_partial_response, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = 
Logger() + + +@tracer.capture_method(capture_response=False) +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) diff --git a/examples/batch_processing/src/extending_failure.py b/examples/batch_processing/src/extending_failure.py new file mode 100644 index 00000000000..424c9a5189b --- /dev/null +++ b/examples/batch_processing/src/extending_failure.py @@ -0,0 +1,38 @@ +import json + +from aws_lambda_powertools import Logger, Metrics, Tracer +from aws_lambda_powertools.metrics import MetricUnit +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + ExceptionInfo, + FailureResponse, + process_partial_response, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class MyProcessor(BatchProcessor): + def failure_handler(self, record: SQSRecord, exception: ExceptionInfo) -> FailureResponse: + metrics.add_metric(name="BatchRecordFailures", unit=MetricUnit.Count, value=1) + return super().failure_handler(record, exception) + + +processor = MyProcessor(event_type=EventType.SQS) +metrics = Metrics(namespace="test") +logger = Logger() +tracer = Tracer() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) + + +@metrics.log_metrics(capture_cold_start_metric=True) +def lambda_handler(event, context: LambdaContext): + return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) diff --git a/examples/batch_processing/src/getting_started_dynamodb.py b/examples/batch_processing/src/getting_started_dynamodb.py index 60d8ed89f0e..61990e2bd26 100644 --- a/examples/batch_processing/src/getting_started_dynamodb.py +++ b/examples/batch_processing/src/getting_started_dynamodb.py @@ -18,10 +18,12 @@ @tracer.capture_method def record_handler(record: DynamoDBRecord): - logger.info(record.dynamodb.new_image) # type: ignore[union-attr] - payload: dict = json.loads(record.dynamodb.new_image.get("Message")) # type: ignore[union-attr,arg-type] - logger.info(payload) - ... 
+ if record.dynamodb and record.dynamodb.new_image: + logger.info(record.dynamodb.new_image) + message = record.dynamodb.new_image.get("Message") + if message: + payload: dict = json.loads(message) + logger.info(payload) @logger.inject_lambda_context diff --git a/examples/batch_processing/src/getting_started_dynamodb_context_manager.py b/examples/batch_processing/src/getting_started_dynamodb_context_manager.py new file mode 100644 index 00000000000..155e1354551 --- /dev/null +++ b/examples/batch_processing/src/getting_started_dynamodb_context_manager.py @@ -0,0 +1,33 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBRecord, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.DynamoDBStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: DynamoDBRecord): + if record.dynamodb and record.dynamodb.new_image: + logger.info(record.dynamodb.new_image) + message = record.dynamodb.new_image.get("Message") + if message: + payload: dict = json.loads(message) + logger.info(payload) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages = processor.process() # kick off processing, return list[tuple] + logger.info(f"Processed ${len(processed_messages)} messages") + + return processor.response() diff --git a/examples/batch_processing/src/getting_started_dynamodb_decorator.py b/examples/batch_processing/src/getting_started_dynamodb_decorator.py new file mode 100644 index 00000000000..a2df6a11f8c --- /dev/null +++ b/examples/batch_processing/src/getting_started_dynamodb_decorator.py @@ -0,0 +1,33 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + batch_processor, +) +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBRecord, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.DynamoDBStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: DynamoDBRecord): + if record.dynamodb and record.dynamodb.new_image: + logger.info(record.dynamodb.new_image) + message = record.dynamodb.new_image.get("Message") + if message: + payload: dict = json.loads(message) + logger.info(payload) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/examples/batch_processing/src/getting_started_dynamodb_event.json b/examples/batch_processing/src/getting_started_dynamodb_event.json new file mode 100644 index 00000000000..2508a6f0b67 --- /dev/null +++ b/examples/batch_processing/src/getting_started_dynamodb_event.json @@ -0,0 +1,51 @@ + +{ + "Records": [ + { + "eventID": "1", + "eventVersion": "1.0", + "dynamodb": { + "Keys": { + "Id": { + "N": "101" + } + }, + "NewImage": { + "Message": { + "S": "failure" + } + }, + "StreamViewType": "NEW_AND_OLD_IMAGES", + "SequenceNumber": "3275880929", + "SizeBytes": 26 + }, + "awsRegion": "us-west-2", 
+ "eventName": "INSERT", + "eventSourceARN": "eventsource_arn", + "eventSource": "aws:dynamodb" + }, + { + "eventID": "1", + "eventVersion": "1.0", + "dynamodb": { + "Keys": { + "Id": { + "N": "101" + } + }, + "NewImage": { + "SomethingElse": { + "S": "success" + } + }, + "StreamViewType": "NEW_AND_OLD_IMAGES", + "SequenceNumber": "8640712661", + "SizeBytes": 26 + }, + "awsRegion": "us-west-2", + "eventName": "INSERT", + "eventSourceARN": "eventsource_arn", + "eventSource": "aws:dynamodb" + } + ] +} diff --git a/examples/batch_processing/src/getting_started_dynamodb_response.json b/examples/batch_processing/src/getting_started_dynamodb_response.json new file mode 100644 index 00000000000..9ccbde9ba9f --- /dev/null +++ b/examples/batch_processing/src/getting_started_dynamodb_response.json @@ -0,0 +1,7 @@ +{ + "batchItemFailures": [ + { + "itemIdentifier": "8640712661" + } + ] +} diff --git a/examples/batch_processing/src/getting_started_kinesis.py b/examples/batch_processing/src/getting_started_kinesis.py index e58222733e1..179154e3b1f 100644 --- a/examples/batch_processing/src/getting_started_kinesis.py +++ b/examples/batch_processing/src/getting_started_kinesis.py @@ -19,7 +19,6 @@ def record_handler(record: KinesisStreamRecord): logger.info(record.kinesis.data_as_text) payload: dict = record.kinesis.data_as_json() logger.info(payload) - ... @logger.inject_lambda_context diff --git a/examples/batch_processing/src/getting_started_kinesis_context_manager.py b/examples/batch_processing/src/getting_started_kinesis_context_manager.py new file mode 100644 index 00000000000..8af0a9e52cf --- /dev/null +++ b/examples/batch_processing/src/getting_started_kinesis_context_manager.py @@ -0,0 +1,28 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType +from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import ( + KinesisStreamRecord, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.KinesisDataStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: KinesisStreamRecord): + logger.info(record.kinesis.data_as_text) + payload: dict = record.kinesis.data_as_json() + logger.info(payload) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages = processor.process() # kick off processing, return list[tuple] + logger.info(f"Processed ${len(processed_messages)} messages") + + return processor.response() diff --git a/examples/batch_processing/src/getting_started_kinesis_decorator.py b/examples/batch_processing/src/getting_started_kinesis_decorator.py new file mode 100644 index 00000000000..107c94ffbad --- /dev/null +++ b/examples/batch_processing/src/getting_started_kinesis_decorator.py @@ -0,0 +1,28 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + batch_processor, +) +from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import ( + KinesisStreamRecord, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.KinesisDataStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: KinesisStreamRecord): + 
logger.info(record.kinesis.data_as_text) + payload: dict = record.kinesis.data_as_json() + logger.info(payload) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/examples/batch_processing/src/getting_started_kinesis_event.json b/examples/batch_processing/src/getting_started_kinesis_event.json new file mode 100644 index 00000000000..2721ad7d9a7 --- /dev/null +++ b/examples/batch_processing/src/getting_started_kinesis_event.json @@ -0,0 +1,36 @@ +{ + "Records": [ + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "4107859083838847772757075850904226111829882106684065", + "data": "eyJNZXNzYWdlIjogInN1Y2Nlc3MifQ==", + "approximateArrivalTimestamp": 1545084650.987 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:4107859083838847772757075850904226111829882106684065", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + }, + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "6006958808509702859251049540584488075644979031228738", + "data": "c3VjY2Vzcw==", + "approximateArrivalTimestamp": 1545084650.987 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:6006958808509702859251049540584488075644979031228738", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + } + ] +} diff --git a/examples/batch_processing/src/getting_started_kinesis_response.json b/examples/batch_processing/src/getting_started_kinesis_response.json new file mode 100644 index 00000000000..7ebd013d7f7 --- /dev/null +++ b/examples/batch_processing/src/getting_started_kinesis_response.json @@ -0,0 +1,7 @@ +{ + "batchItemFailures": [ + { + "itemIdentifier": "6006958808509702859251049540584488075644979031228738" + } + ] +} diff --git a/examples/batch_processing/src/getting_started_sqs.py b/examples/batch_processing/src/getting_started_sqs.py index 15f8701f297..8b6fe4c4266 100644 --- a/examples/batch_processing/src/getting_started_sqs.py +++ b/examples/batch_processing/src/getting_started_sqs.py @@ -20,7 +20,6 @@ def record_handler(record: SQSRecord): if payload: item: dict = json.loads(payload) logger.info(item) - ... 
@logger.inject_lambda_context diff --git a/examples/batch_processing/src/getting_started_sqs_context_manager.py b/examples/batch_processing/src/getting_started_sqs_context_manager.py new file mode 100644 index 00000000000..19c14dfdd76 --- /dev/null +++ b/examples/batch_processing/src/getting_started_sqs_context_manager.py @@ -0,0 +1,29 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages = processor.process() # kick off processing, return list[tuple] + logger.info(f"Processed ${len(processed_messages)} messages") + + return processor.response() diff --git a/examples/batch_processing/src/getting_started_sqs_decorator.py b/examples/batch_processing/src/getting_started_sqs_decorator.py new file mode 100644 index 00000000000..4f058beb862 --- /dev/null +++ b/examples/batch_processing/src/getting_started_sqs_decorator.py @@ -0,0 +1,29 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + batch_processor, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/examples/batch_processing/src/getting_started_sqs_event.json b/examples/batch_processing/src/getting_started_sqs_event.json new file mode 100644 index 00000000000..50a411be861 --- /dev/null +++ b/examples/batch_processing/src/getting_started_sqs_event.json @@ -0,0 +1,36 @@ +{ + "Records": [ + { + "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", + "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", + "body": "{\"Message\": \"success\"}", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" + }, + "messageAttributes": {}, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", + "awsRegion": "us-east-1" + }, + { + "messageId": "244fc6b4-87a3-44ab-83d2-361172410c3a", + "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", + "body": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0Lg==", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" 
+ }, + "messageAttributes": {}, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", + "awsRegion": "us-east-1" + } + ] +} diff --git a/examples/batch_processing/src/getting_started_sqs_fifo.py b/examples/batch_processing/src/getting_started_sqs_fifo.py index d39f8ba63f1..d30fb319c85 100644 --- a/examples/batch_processing/src/getting_started_sqs_fifo.py +++ b/examples/batch_processing/src/getting_started_sqs_fifo.py @@ -1,3 +1,5 @@ +import json + from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.utilities.batch import ( SqsFifoPartialProcessor, @@ -13,7 +15,10 @@ @tracer.capture_method def record_handler(record: SQSRecord): - ... + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) @logger.inject_lambda_context diff --git a/examples/batch_processing/src/getting_started_sqs_fifo_context_manager.py b/examples/batch_processing/src/getting_started_sqs_fifo_context_manager.py index 45759b2a585..310cc3b9839 100644 --- a/examples/batch_processing/src/getting_started_sqs_fifo_context_manager.py +++ b/examples/batch_processing/src/getting_started_sqs_fifo_context_manager.py @@ -1,3 +1,5 @@ +import json + from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.utilities.batch import SqsFifoPartialProcessor from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord @@ -10,7 +12,10 @@ @tracer.capture_method def record_handler(record: SQSRecord): - ... + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) @logger.inject_lambda_context diff --git a/examples/batch_processing/src/getting_started_sqs_fifo_decorator.py b/examples/batch_processing/src/getting_started_sqs_fifo_decorator.py index a5fe9f23235..22448d2ce8a 100644 --- a/examples/batch_processing/src/getting_started_sqs_fifo_decorator.py +++ b/examples/batch_processing/src/getting_started_sqs_fifo_decorator.py @@ -1,3 +1,5 @@ +import json + from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.utilities.batch import ( SqsFifoPartialProcessor, @@ -13,7 +15,10 @@ @tracer.capture_method def record_handler(record: SQSRecord): - ... 
+ payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) @logger.inject_lambda_context diff --git a/examples/batch_processing/src/getting_started_sqs_response.json b/examples/batch_processing/src/getting_started_sqs_response.json new file mode 100644 index 00000000000..9802316a689 --- /dev/null +++ b/examples/batch_processing/src/getting_started_sqs_response.json @@ -0,0 +1,7 @@ +{ + "batchItemFailures": [ + { + "itemIdentifier": "244fc6b4-87a3-44ab-83d2-361172410c3a" + } + ] +} diff --git a/examples/batch_processing/src/getting_started_with_test.py b/examples/batch_processing/src/getting_started_with_test.py new file mode 100644 index 00000000000..49e78269248 --- /dev/null +++ b/examples/batch_processing/src/getting_started_with_test.py @@ -0,0 +1,45 @@ +import json +from dataclasses import dataclass +from pathlib import Path + +import pytest +from getting_started_with_test_app import lambda_handler, processor + + +def load_event(path: Path): + with path.open() as f: + return json.load(f) + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + return LambdaContext() + + +@pytest.fixture() +def sqs_event(): + """Generates API GW Event""" + return load_event(path=Path("events/sqs_event.json")) + + +def test_app_batch_partial_response(sqs_event, lambda_context): + # GIVEN + processor_result = processor # access processor for additional assertions + successful_record = sqs_event["Records"][0] + failed_record = sqs_event["Records"][1] + expected_response = {"batchItemFailures": [{"itemIdentifier": failed_record["messageId"]}]} + + # WHEN + ret = lambda_handler(sqs_event, lambda_context) + + # THEN + assert ret == expected_response + assert len(processor_result.fail_messages) == 1 + assert processor_result.success_messages[0] == successful_record diff --git a/examples/batch_processing/src/getting_started_with_test_app.py b/examples/batch_processing/src/getting_started_with_test_app.py new file mode 100644 index 00000000000..8b6fe4c4266 --- /dev/null +++ b/examples/batch_processing/src/getting_started_with_test_app.py @@ -0,0 +1,28 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + process_partial_response, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + logger.info(item) + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) diff --git a/examples/batch_processing/src/pydantic_dynamodb.py b/examples/batch_processing/src/pydantic_dynamodb.py new file mode 100644 index 00000000000..dbd5cff24c4 --- /dev/null +++ b/examples/batch_processing/src/pydantic_dynamodb.py @@ -0,0 +1,58 @@ +import json +from typing import Dict, Optional + +from typing_extensions import Literal + +from aws_lambda_powertools import Logger, 
Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + process_partial_response, +) +from aws_lambda_powertools.utilities.parser import BaseModel, validator +from aws_lambda_powertools.utilities.parser.models import ( + DynamoDBStreamChangedRecordModel, + DynamoDBStreamRecordModel, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class Order(BaseModel): + item: dict + + +class OrderDynamoDB(BaseModel): + Message: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("Message", pre=True) + def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): + return json.loads(value["S"]) + + +class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): + NewImage: Optional[OrderDynamoDB] + OldImage: Optional[OrderDynamoDB] + + +class OrderDynamoDBRecord(DynamoDBStreamRecordModel): + dynamodb: OrderDynamoDBChangeRecord + + +processor = BatchProcessor(event_type=EventType.DynamoDBStreams, model=OrderDynamoDBRecord) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: OrderDynamoDBRecord): + if record.dynamodb.NewImage and record.dynamodb.NewImage.Message: + logger.info(record.dynamodb.NewImage.Message.item) + return record.dynamodb.NewImage.Message.item + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) diff --git a/examples/batch_processing/src/pydantic_dynamodb_event.json b/examples/batch_processing/src/pydantic_dynamodb_event.json new file mode 100644 index 00000000000..40a8977e7bd --- /dev/null +++ b/examples/batch_processing/src/pydantic_dynamodb_event.json @@ -0,0 +1,50 @@ +{ + "Records": [ + { + "eventID": "1", + "eventVersion": "1.0", + "dynamodb": { + "Keys": { + "Id": { + "N": "101" + } + }, + "NewImage": { + "Message": { + "S": "{\"item\": {\"laptop\": \"amd\"}}" + } + }, + "StreamViewType": "NEW_AND_OLD_IMAGES", + "SequenceNumber": "3275880929", + "SizeBytes": 26 + }, + "awsRegion": "us-west-2", + "eventName": "INSERT", + "eventSourceARN": "eventsource_arn", + "eventSource": "aws:dynamodb" + }, + { + "eventID": "1", + "eventVersion": "1.0", + "dynamodb": { + "Keys": { + "Id": { + "N": "101" + } + }, + "NewImage": { + "SomethingElse": { + "S": "success" + } + }, + "StreamViewType": "NEW_AND_OLD_IMAGES", + "SequenceNumber": "8640712661", + "SizeBytes": 26 + }, + "awsRegion": "us-west-2", + "eventName": "INSERT", + "eventSourceARN": "eventsource_arn", + "eventSource": "aws:dynamodb" + } + ] + } diff --git a/examples/batch_processing/src/pydantic_kinesis.py b/examples/batch_processing/src/pydantic_kinesis.py new file mode 100644 index 00000000000..012f67a9b35 --- /dev/null +++ b/examples/batch_processing/src/pydantic_kinesis.py @@ -0,0 +1,42 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + process_partial_response, +) +from aws_lambda_powertools.utilities.parser import BaseModel +from aws_lambda_powertools.utilities.parser.models import ( + KinesisDataStreamRecord, + KinesisDataStreamRecordPayload, +) +from aws_lambda_powertools.utilities.parser.types import Json +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class Order(BaseModel): + item: dict + + +class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload): + data: 
Json[Order] + + +class OrderKinesisRecord(KinesisDataStreamRecord): + kinesis: OrderKinesisPayloadRecord + + +processor = BatchProcessor(event_type=EventType.KinesisDataStreams, model=OrderKinesisRecord) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: OrderKinesisRecord): + logger.info(record.kinesis.data.item) + return record.kinesis.data.item + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) diff --git a/examples/batch_processing/src/pydantic_kinesis_event.json b/examples/batch_processing/src/pydantic_kinesis_event.json new file mode 100644 index 00000000000..0679115dd65 --- /dev/null +++ b/examples/batch_processing/src/pydantic_kinesis_event.json @@ -0,0 +1,36 @@ +{ + "Records": [ + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "4107859083838847772757075850904226111829882106684065", + "data": "eyJpdGVtIjogeyJsYXB0b3AiOiAiYW1kIn19Cg==", + "approximateArrivalTimestamp": 1545084650.987 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:4107859083838847772757075850904226111829882106684065", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + }, + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "6006958808509702859251049540584488075644979031228738", + "data": "eyJpdGVtIjogeyJrZXlib2FyZCI6ICJjbGFzc2ljIn19Cg==", + "approximateArrivalTimestamp": 1545084650.987 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:6006958808509702859251049540584488075644979031228738", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + } + ] + } diff --git a/examples/batch_processing/src/pydantic_sqs.py b/examples/batch_processing/src/pydantic_sqs.py new file mode 100644 index 00000000000..0e82a304e4e --- /dev/null +++ b/examples/batch_processing/src/pydantic_sqs.py @@ -0,0 +1,35 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + process_partial_response, +) +from aws_lambda_powertools.utilities.parser import BaseModel +from aws_lambda_powertools.utilities.parser.models import SqsRecordModel +from aws_lambda_powertools.utilities.parser.types import Json +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class Order(BaseModel): + item: dict + + +class OrderSqsRecord(SqsRecordModel): + body: Json[Order] # deserialize order data from JSON string + + +processor = BatchProcessor(event_type=EventType.SQS, model=OrderSqsRecord) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: OrderSqsRecord): + logger.info(record.body.item) + return record.body.item + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) diff --git 
a/examples/batch_processing/src/pydantic_sqs_event.json b/examples/batch_processing/src/pydantic_sqs_event.json new file mode 100644 index 00000000000..c3f26d074b1 --- /dev/null +++ b/examples/batch_processing/src/pydantic_sqs_event.json @@ -0,0 +1,36 @@ +{ + "Records": [ + { + "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", + "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", + "body": "{\"item\": {\"laptop\": \"amd\"}}", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" + }, + "messageAttributes": {}, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", + "awsRegion": "us-east-1" + }, + { + "messageId": "244fc6b4-87a3-44ab-83d2-361172410c3a", + "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", + "body": "{\"item\": {\"keyboard\": \"classic\"}}", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" + }, + "messageAttributes": {}, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", + "awsRegion": "us-east-1" + } + ] + } diff --git a/examples/batch_processing/src/sentry_error_tracking.py b/examples/batch_processing/src/sentry_error_tracking.py new file mode 100644 index 00000000000..59ec8262cdb --- /dev/null +++ b/examples/batch_processing/src/sentry_error_tracking.py @@ -0,0 +1,9 @@ +from sentry_sdk import capture_exception + +from aws_lambda_powertools.utilities.batch import BatchProcessor, FailureResponse + + +class MyProcessor(BatchProcessor): + def failure_handler(self, record, exception) -> FailureResponse: + capture_exception() # send exception to Sentry + return super().failure_handler(record, exception) diff --git a/poetry.lock b/poetry.lock index 548fb79f616..5f24792ab66 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
[[package]] name = "anyio" @@ -2655,6 +2655,48 @@ files = [ attrs = "*" pbr = "*" +[[package]] +name = "sentry-sdk" +version = "1.22.2" +description = "Python client for Sentry (https://sentry.io)" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "sentry-sdk-1.22.2.tar.gz", hash = "sha256:5932c092c6e6035584eb74d77064e4bce3b7935dfc4a331349719a40db265840"}, + {file = "sentry_sdk-1.22.2-py2.py3-none-any.whl", hash = "sha256:cf89a5063ef84278d186aceaed6fb595bfe67d099298e537634a323664265669"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11,<2.0.0", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)"] +grpcio = ["grpcio (>=1.21.1)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +tornado = ["tornado (>=5)"] + [[package]] name = "six" version = "1.16.0" @@ -3026,7 +3068,7 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] +all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] aws-sdk = ["boto3"] parser = ["pydantic"] tracer = ["aws-xray-sdk"] @@ -3035,4 +3077,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "0c140ce333e0131b6cf5fee17b8cba631dfd3bbd3ee5f8ab66175bfeed493842" +content-hash = "fe686c11217e31bf5fd24895d0224fc3e44c9da4192c6b64d245af1c8033a9cd" diff --git a/pyproject.toml b/pyproject.toml index f2613d3c36d..006dd298516 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,6 +105,7 @@ cfn-lint = "0.77.5" mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" +sentry-sdk = "^1.22.2" [tool.coverage.run] source = ["aws_lambda_powertools"] From 8a3f8cfa18491ea6181763c5133c2b9ac941c55d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 May 2023 17:27:18 +0200 Subject: [PATCH 22/76] chore(deps-dev): bump aws-cdk from 2.78.0 to 2.79.0 (#2235) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 14 +++++++------- package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/package-lock.json b/package-lock.json index 37bee4fb126..1d19c8115e2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,13 +8,13 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.78.0" + "aws-cdk": "^2.79.0" } }, "node_modules/aws-cdk": { - "version": "2.78.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.78.0.tgz", - "integrity": 
"sha512-fDqImTHefBjr8RYduO0bQRkINYJRGKdTXABeLsaMP6Ff4qDXTymaplyvUxNSB9DlQ+oXQ/aJgqFGiIJXSr+kpg==", + "version": "2.79.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.0.tgz", + "integrity": "sha512-pVOw5QBbtmSA+PQmGwSR1qbtgC0caC6+Vc6bCZM4aK9nTh7iUXv4MKqXCUfxN6waNgYpweUCb29gxo6DOe99Yw==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -43,9 +43,9 @@ }, "dependencies": { "aws-cdk": { - "version": "2.78.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.78.0.tgz", - "integrity": "sha512-fDqImTHefBjr8RYduO0bQRkINYJRGKdTXABeLsaMP6Ff4qDXTymaplyvUxNSB9DlQ+oXQ/aJgqFGiIJXSr+kpg==", + "version": "2.79.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.0.tgz", + "integrity": "sha512-pVOw5QBbtmSA+PQmGwSR1qbtgC0caC6+Vc6bCZM4aK9nTh7iUXv4MKqXCUfxN6waNgYpweUCb29gxo6DOe99Yw==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index 18f7afaf4bd..68477155422 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,6 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.78.0" + "aws-cdk": "^2.79.0" } } From 0a4cf7a5c12fb0e5cb1796582557d8367d86ad90 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 11:51:48 +0100 Subject: [PATCH 23/76] chore(ci): changelog rebuild (#2241) Co-authored-by: Release bot --- CHANGELOG.md | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a63192360e9..1193ad22546 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,22 +4,29 @@ # Unreleased +## Documentation + +* **batch_processing:** snippets split, improved, and lint ([#2231](https://github.com/awslabs/aws-lambda-powertools-python/issues/2231)) + ## Maintenance -* **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) -* **ci:** changelog rebuild ([#2232](https://github.com/awslabs/aws-lambda-powertools-python/issues/2232)) -* **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) +* **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) * **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) +* **ci:** changelog rebuild ([#2236](https://github.com/awslabs/aws-lambda-powertools-python/issues/2236)) * **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) -* **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) +* **ci:** changelog rebuild ([#2232](https://github.com/awslabs/aws-lambda-powertools-python/issues/2232)) +* **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) +* **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) * **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) * **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) -* **deps-dev:** bump types-python-dateutil from 2.8.19.12 to 2.8.19.13 
([#2234](https://github.com/awslabs/aws-lambda-powertools-python/issues/2234)) +* **deps-dev:** bump aws-cdk from 2.78.0 to 2.79.0 ([#2235](https://github.com/awslabs/aws-lambda-powertools-python/issues/2235)) +* **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) +* **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) * **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) -* **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) -* **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) * **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) +* **deps-dev:** bump types-python-dateutil from 2.8.19.12 to 2.8.19.13 ([#2234](https://github.com/awslabs/aws-lambda-powertools-python/issues/2234)) +* **deps-dev:** bump mypy from 1.2.0 to 1.3.0 ([#2233](https://github.com/awslabs/aws-lambda-powertools-python/issues/2233)) * **deps-dev:** bump aws-cdk from 2.77.0 to 2.78.0 ([#2202](https://github.com/awslabs/aws-lambda-powertools-python/issues/2202)) From b4b01378fde39dcdd4bc82d85bf5ef3379d855bd Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 12 May 2023 15:23:50 +0200 Subject: [PATCH 24/76] chore(ci): convert create-pr steps into composite action (#2238) * chore: convert create-pr steps into composite action Signed-off-by: heitorlessa * chore(ci): changelog to use new create-pr action Signed-off-by: heitorlessa * chore(ci): revert changelog to trigger on push Signed-off-by: heitorlessa * chore: document custom action Signed-off-by: heitorlessa * chore: add support for any target branch Signed-off-by: heitorlessa --------- Signed-off-by: heitorlessa --- .github/actions/create-pr/action.yml | 88 +++++++++++ .../create-pr/create_pr_for_staged_changes.sh | 145 ++++++++++++++++++ .../scripts/create_pr_for_staged_changes.sh | 116 -------------- .github/workflows/build_changelog.yml | 14 +- .../workflows/reusable_publish_changelog.yml | 30 ++-- 5 files changed, 246 insertions(+), 147 deletions(-) create mode 100644 .github/actions/create-pr/action.yml create mode 100755 .github/actions/create-pr/create_pr_for_staged_changes.sh delete mode 100644 .github/scripts/create_pr_for_staged_changes.sh diff --git a/.github/actions/create-pr/action.yml b/.github/actions/create-pr/action.yml new file mode 100644 index 00000000000..b7713a6c785 --- /dev/null +++ b/.github/actions/create-pr/action.yml @@ -0,0 +1,88 @@ +name: "Create PR custom action" +description: "Create a PR and a temporary branch, close duplicates" + +# PROCESS +# +# 1. Setup git client using Powertools bot username +# 2. Pushes staged files to a temporary branch +# 3. Creates a PR from temporary branch against a target branch (typically trunk: develop, main, etc.) +# 4. Searches for duplicate PRs with the same title +# 5. If duplicates are found, link to the most recent one, close and delete their branches so we keep a single PR +# 6. 
In the event of failure, we delete the now orphaned branch (if any), and propagate the failure + +# USAGE +# +# - name: Create PR +# id: create-pr +# uses: ./.github/actions/create-pr +# with: +# files: "CHANGELOG.md" +# temp_branch_prefix: "ci-changelog" +# pull_request_title: "chore(ci): changelog rebuild" +# github_token: ${{ secrets.GITHUB_TOKEN }} +# - name: Step to demonstrate how to access outputs (no need for this) +# run: | +# echo "PR number: ${PR_ID}" +# echo "Branch: ${BRANCH}" +# env: +# PR_ID: ${{ steps.create-pr.outputs.pull_request_id}} +# BRANCH: ${{ steps.create-pr.outputs.temp_branch}} + +inputs: + files: + description: "Files to add separated by space" + required: true + temp_branch_prefix: + description: "Prefix for temporary git branch to be created, e.g, ci-docs" + required: true + pull_request_title: + description: "Pull Request title to use" + required: true + github_token: + description: "GitHub token for GitHub CLI" + required: true + target_branch: + description: "Branch to target when creating a PR against (develop, by default)" + required: false + default: develop + +outputs: + pull_request_id: + description: "Pull request ID created" + value: ${{ steps.create-pr.outputs.pull_request_id }} + temp_branch: + description: "Temporary branch created with staged changed" + value: ${{ steps.create-pr.outputs.temp_branch }} + +runs: + using: "composite" + steps: + - id: adjust-path + run: echo "${{ github.action_path }}" >> $GITHUB_PATH + shell: bash + - id: setup-git + name: Git client setup and refresh tip + run: | + git config user.name "Powertools bot" + git config user.email "aws-lambda-powertools-feedback@amazon.com" + git config pull.rebase true + git config remote.origin.url >&- + shell: bash + - id: create-pr + working-directory: ${{ env.GITHUB_WORKSPACE }} + run: create_pr_for_staged_changes.sh "${FILES}" + env: + FILES: ${{ inputs.files }} + TEMP_BRANCH_PREFIX: ${{ inputs.temp_branch_prefix }} + PR_TITLE: ${{ inputs.pull_request_title }} + BASE_BRANCH: ${{ inputs.target_branch }} + GH_TOKEN: ${{ inputs.github_token }} + shell: bash + - id: cleanup + name: Cleanup orphaned branch + if: failure() + run: git push origin --delete "${TEMP_BRANCH_PREFIX}-${GITHUB_RUN_ID}" || echo "Must have failed before creating temporary branch; no cleanup needed." + env: + TEMP_BRANCH_PREFIX: ${{ inputs.temp_branch_prefix }} + GITHUB_RUN_ID: ${{ github.run_id }} + shell: bash diff --git a/.github/actions/create-pr/create_pr_for_staged_changes.sh b/.github/actions/create-pr/create_pr_for_staged_changes.sh new file mode 100755 index 00000000000..2f32ab24342 --- /dev/null +++ b/.github/actions/create-pr/create_pr_for_staged_changes.sh @@ -0,0 +1,145 @@ +#!/bin/bash +set -uo pipefail # prevent accessing unset env vars, prevent masking pipeline errors to the next command + +#docs +#title :create_pr_for_staged_changes.sh +#description :This script will create a PR for staged changes, detect and close duplicate PRs. +#author :@heitorlessa +#date :May 8th 2023 +#version :0.1 +#usage :bash create_pr_for_staged_changes.sh {git_staged_files_or_directories_separated_by_space} +#notes :Meant to use in GitHub Actions only. 
Temporary branch will be named $TEMP_BRANCH_PREFIX-$GITHUB_RUN_ID +#os_version :Ubuntu 22.04.2 LTS +#required_env_vars :PR_TITLE, TEMP_BRANCH_PREFIX, GH_TOKEN +#============================================================================== + +# Sets GitHub Action with error message to ease troubleshooting +function error() { + echo "::error file=${FILENAME}::$1" + exit 1 +} + +function debug() { + TIMESTAMP=$(date -u "+%FT%TZ") # 2023-05-10T07:53:59Z + echo ""${TIMESTAMP}" - $1" +} + +function notice() { + echo "::notice file=${FILENAME}::$1" +} + +function start_span() { + echo "::group::$1" +} + +function end_span() { + echo "::endgroup::" +} + +function has_required_config() { + start_span "Validating required config" + test -z "${TEMP_BRANCH_PREFIX}" && error "TEMP_BRANCH_PREFIX env must be set to create a PR" + test -z "${PR_TITLE}" && error "PR_TITLE env must be set" + test -z "${GH_TOKEN}" && error "GH_TOKEN env must be set for GitHub CLI" + + # Default GitHub Actions Env Vars: https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables + debug "Are we running in GitHub Action environment?" + test -z "${GITHUB_RUN_ID}" && error "GITHUB_RUN_ID env must be set to trace Workflow Run ID back to PR" + test -z "${GITHUB_SERVER_URL}" && error "GITHUB_SERVER_URL env must be set to trace Workflow Run ID back to PR" + test -z "${GITHUB_REPOSITORY}" && error "GITHUB_REPOSITORY env must be set to trace Workflow Run ID back to PR" + + debug "Config validated successfully!" + set_environment_variables + end_span +} + +function set_environment_variables() { + start_span "Setting environment variables" + export readonly WORKFLOW_URL="${GITHUB_SERVER_URL}"/"${GITHUB_REPOSITORY}"/actions/runs/"${GITHUB_RUN_ID}" # e.g., heitorlessa/aws-lambda-powertools-test/actions/runs/4913570678 + export readonly TEMP_BRANCH="${TEMP_BRANCH_PREFIX}"-"${GITHUB_RUN_ID}" # e.g., ci-changelog-4894658712 + export readonly BASE_BRANCH="${BASE_BRANCH:-develop}" # e.g., main, defaults to develop if missing + export readonly PR_BODY="This is an automated PR created from the following workflow" + export readonly FILENAME=".github/scripts/$(basename "$0")" + export readonly NO_DUPLICATES_MESSAGE="No duplicated PRs found" + end_span +} + +function has_anything_changed() { + start_span "Validating git staged files" + HAS_ANY_SOURCE_CODE_CHANGED="$(git status --porcelain)" + + test -z "${HAS_ANY_SOURCE_CODE_CHANGED}" && debug "Nothing to update; exitting early" && exit 0 + end_span +} + +function create_temporary_branch_with_changes() { + start_span "Creating temporary branch: "${TEMP_BRANCH}"" + git checkout -b "${TEMP_BRANCH}" + + debug "Committing staged files: $*" + echo "$@" | xargs -n1 git add || error "Failed to add staged changes: "$@"" + git commit -m "${PR_TITLE}" + + git push origin "${TEMP_BRANCH}" + end_span +} + +function create_pr() { + start_span "Creating PR against ${TEMP_BRANCH} branch" + NEW_PR_URL=$(gh pr create --title "${PR_TITLE}" --body "${PR_BODY}: ${WORKFLOW_URL}" --base "${BASE_BRANCH}" || error "Failed to create PR") # e.g, https://github.com/awslabs/aws-lambda-powertools/pull/13 + + # greedy remove any string until the last URL path, including the last '/'. 
https://opensource.com/article/17/6/bash-parameter-expansion
+    debug "Extracting PR Number from PR URL: ${NEW_PR_URL}"
+    NEW_PR_ID="${NEW_PR_URL##*/}" # 13
+    export NEW_PR_URL
+    export NEW_PR_ID
+    end_span
+}
+
+function close_duplicate_prs() {
+    start_span "Searching for duplicate PRs"
+    DUPLICATE_PRS=$(gh pr list --search "${PR_TITLE}" --json number --jq ".[] | select(.number != ${NEW_PR_ID}) | .number") # e.g., 13\n14
+
+    if [ -z "${DUPLICATE_PRS}" ]; then
+        debug "No duplicate PRs found"
+        DUPLICATE_PRS="${NO_DUPLICATES_MESSAGE}"
+    else
+        debug "Closing duplicated PRs: ${DUPLICATE_PRS}"
+        echo "${DUPLICATE_PRS}" | xargs -L1 gh pr close --delete-branch --comment "Superseded by #${NEW_PR_ID}"
+    fi
+
+    export DUPLICATE_PRS
+    end_span
+}
+
+function report_job_output() {
+    start_span "Updating job outputs"
+    echo pull_request_id="${NEW_PR_ID}" >>"$GITHUB_OUTPUT"
+    echo temp_branch="${TEMP_BRANCH}" >>"$GITHUB_OUTPUT"
+    end_span
+}
+
+function report_summary() {
+    start_span "Creating job summary"
+    echo "### Pull request created successfully :rocket: ${NEW_PR_URL}

Closed duplicated PRs: ${DUPLICATE_PRS}" >>"$GITHUB_STEP_SUMMARY" + + notice "PR_URL is: ${NEW_PR_URL}" + notice "PR_BRANCH is: ${TEMP_BRANCH}" + notice "PR_DUPLICATES are: ${DUPLICATE_PRS}" + end_span +} + +function main() { + # Sanity check + has_anything_changed + has_required_config + + create_temporary_branch_with_changes "$@" + create_pr + close_duplicate_prs + + report_job_output + report_summary +} + +main "$@" diff --git a/.github/scripts/create_pr_for_staged_changes.sh b/.github/scripts/create_pr_for_staged_changes.sh deleted file mode 100644 index a35d45cc9e9..00000000000 --- a/.github/scripts/create_pr_for_staged_changes.sh +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/bash -set -uxo pipefail # enable debugging, prevent accessing unset env vars, prevent masking pipeline errors to the next command - -#docs -#title :create_pr_for_staged_changes.sh -#description :This script will create a PR for staged changes and detect and close duplicate PRs. -#author :@heitorlessa -#date :May 8th 2023 -#version :0.1 -#usage :bash create_pr_for_staged_changes.sh {git_staged_files_or_directories_separated_by_space} -#notes :Meant to use in GitHub Actions only. Temporary branch will be named $TEMP_BRANCH_PREFIX-$GITHUB_RUN_ID -#os_version :Ubuntu 22.04.2 LTS -#required_env_vars :COMMIT_MSG, PR_TITLE, TEMP_BRANCH_PREFIX, GH_TOKEN, GITHUB_RUN_ID, GITHUB_SERVER_URL, GITHUB_REPOSITORY -#============================================================================== - -PR_BODY="This is an automated PR created from the following workflow" -FILENAME=".github/scripts/$(basename "$0")" -readonly PR_BODY -readonly FILENAME - -# Sets GitHub Action with error message to ease troubleshooting -function raise_validation_error() { - echo "::error file=${FILENAME}::$1" - exit 1 -} - -function debug() { - echo "::debug::$1" -} - -function notice() { - echo "::notice file=${FILENAME}::$1" -} - -function has_required_config() { - # Default GitHub Actions Env Vars: https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables - debug "Do we have required environment variables?" - test -z "${TEMP_BRANCH_PREFIX}" && raise_validation_error "TEMP_BRANCH_PREFIX env must be set to create a PR" - test -z "${GH_TOKEN}" && raise_validation_error "GH_TOKEN env must be set for GitHub CLI" - test -z "${COMMIT_MSG}" && raise_validation_error "COMMIT_MSG env must be set" - test -z "${PR_TITLE}" && raise_validation_error "PR_TITLE env must be set" - test -z "${GITHUB_RUN_ID}" && raise_validation_error "GITHUB_RUN_ID env must be set to trace Workflow Run ID back to PR" - test -z "${GITHUB_SERVER_URL}" && raise_validation_error "GITHUB_SERVER_URL env must be set to trace Workflow Run ID back to PR" - test -z "${GITHUB_REPOSITORY}" && raise_validation_error "GITHUB_REPOSITORY env must be set to trace Workflow Run ID back to PR" - - set_environment_variables -} - -function set_environment_variables() { - WORKFLOW_URL="${GITHUB_SERVER_URL}"/"${GITHUB_REPOSITORY}"/actions/runs/"${GITHUB_RUN_ID}" # e.g., heitorlessa/aws-lambda-powertools-test/actions/runs/4913570678 - TEMP_BRANCH="${TEMP_BRANCH_PREFIX}"-"${GITHUB_RUN_ID}" # e.g., ci-changelog-4894658712 - - export readonly WORKFLOW_URL - export readonly TEMP_BRANCH -} - -function has_anything_changed() { - debug "Is there an update to the source code?" 
- HAS_ANY_SOURCE_CODE_CHANGED="$(git status --porcelain)" - - test -z "${HAS_ANY_SOURCE_CODE_CHANGED}" && echo "Nothing to update" && exit 0 -} - -function create_temporary_branch_with_changes() { - debug "Creating branch ${TEMP_BRANCH}" - git checkout -b "${TEMP_BRANCH}" - - debug "Committing staged files: $*" - git add "$@" - git commit -m "${COMMIT_MSG}" - - debug "Creating branch remotely" - git push origin "${TEMP_BRANCH}" -} - -function create_pr() { - debug "Creating PR against ${BRANCH} branch" - NEW_PR_URL=$(gh pr create --title "${PR_TITLE}" --body "${PR_BODY}: ${WORKFLOW_URL}" --base "${BRANCH}") # e.g, https://github.com/awslabs/aws-lambda-powertools/pull/13 - - # greedy remove any string until the last URL path, including the last '/'. https://opensource.com/article/17/6/bash-parameter-expansion - NEW_PR_ID="${NEW_PR_URL##*/}" # 13 - export NEW_PR_URL - export NEW_PR_ID -} - -function close_duplicate_prs() { - debug "Do we have any duplicate PRs?" - DUPLICATE_PRS=$(gh pr list --search "${PR_TITLE}" --json number --jq ".[] | select(.number != ${NEW_PR_ID}) | .number") # e.g, 13\n14 - - debug "Closing duplicated PRs if any" - echo "${DUPLICATE_PRS}" | xargs -L1 gh pr close --delete-branch --comment "Superseded by #${NEW_PR_ID}" - export readonly DUPLICATE_PRS -} - -function report_summary() { - debug "Creating job summary" - echo "### Pull request created successfully :rocket: #${NEW_PR_URL}

Closed duplicated PRs (if any): ${DUPLICATE_PRS}" >>"$GITHUB_STEP_SUMMARY" - - notice "PR_URL is ${NEW_PR_URL}" - notice "PR_BRANCH is ${TEMP_BRANCH}" - notice "PR_DUPLICATES are ${DUPLICATE_PRS}" -} - -function main() { - # Sanity check - has_anything_changed - has_required_config - - create_temporary_branch_with_changes "$@" - create_pr - close_duplicate_prs - - report_summary -} - -main "$@" diff --git a/.github/workflows/build_changelog.yml b/.github/workflows/build_changelog.yml index ebc978022bc..f15275d07a7 100644 --- a/.github/workflows/build_changelog.yml +++ b/.github/workflows/build_changelog.yml @@ -3,17 +3,9 @@ name: Build changelog on: workflow_dispatch: - schedule: - # ┌───────────── minute (0 - 59) - # │ ┌───────────── hour (0 - 23) - # │ │ ┌───────────── day of the month (1 - 31) - # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC) - # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT) - # │ │ │ │ │ - # │ │ │ │ │ - # │ │ │ │ │ - # * * * * * - - cron: '0 8 * * *' + push: + branches: + - develop jobs: changelog: diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml index 4294dda4a94..f08b23ca9f1 100644 --- a/.github/workflows/reusable_publish_changelog.yml +++ b/.github/workflows/reusable_publish_changelog.yml @@ -4,7 +4,9 @@ on: workflow_call: env: - BRANCH: develop + TEMP_BRANCH_PREFIX: "ci-changelog" + PULL_REQUEST_TITLE: "chore(ci): changelog rebuild" + FILES_TO_COMMIT: "CHANGELOG.md" jobs: publish_changelog: @@ -21,25 +23,13 @@ jobs: uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: fetch-depth: 0 - - name: Git client setup and refresh tip - run: | - git config user.name "Release bot" - git config user.email "aws-devax-open-source@amazon.com" - git config pull.rebase true - git config remote.origin.url >&- || git remote add origin https://github.com/"${origin}" # Git Detached mode (release notes) doesn't have origin - git pull origin "${BRANCH}" - name: "Generate latest changelog" run: make changelog - name: Create PR - run: bash .github/scripts/create_pr_for_staged_changes.sh CHANGELOG.md - env: - COMMIT_MSG: "chore(ci): update changelog with latest changes" - PR_TITLE: "chore(ci): changelog rebuild" - TEMP_BRANCH_PREFIX: "ci-changelog" - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Cleanup orphaned branch - if: failure() - run: git push origin --delete "${TEMP_BRANCH_PREFIX}-${GITHUB_RUN_ID}" || echo "Must have failed before creating temporary branch; no cleanup needed." 
- env: - TEMP_BRANCH_PREFIX: "ci-changelog" - GITHUB_RUN_ID: ${{ github.run_id }} + id: create-pr + uses: ./.github/actions/create-pr + with: + files: ${{ env.FILES_TO_COMMIT }} + temp_branch_prefix: ${{ env.TEMP_BRANCH_PREFIX }} + pull_request_title: ${{ env.PULL_REQUEST_TITLE }} + github_token: ${{ secrets.GITHUB_TOKEN }} From 5850aa8626d21aaf06fb1225f7f46edeed22e71b Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 12 May 2023 15:34:48 +0200 Subject: [PATCH 25/76] chore(ci): bump package version after release via pull request (#2239) * chore: convert create-pr steps into composite action Signed-off-by: heitorlessa * chore(ci): changelog to use new create-pr action Signed-off-by: heitorlessa * chore(ci): revert changelog to trigger on push Signed-off-by: heitorlessa * chore: document custom action Signed-off-by: heitorlessa * chore: add support for any target branch Signed-off-by: heitorlessa * chore: create a PR to bump version after release Signed-off-by: heitorlessa * chore: write permission is no longer necessary Signed-off-by: heitorlessa * chore: remove changelog from release * chore: address leandro's feedback Signed-off-by: Heitor Lessa * chore: address leandro's feedback Signed-off-by: Heitor Lessa --------- Signed-off-by: heitorlessa Signed-off-by: Heitor Lessa --- .github/workflows/release.yml | 47 +++++++++++++++++------------------ 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 38aadc4d873..f062baa01b2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -51,7 +51,7 @@ jobs: build: runs-on: aws-lambda-powertools_ubuntu-latest_4-core permissions: - contents: write + contents: read outputs: RELEASE_VERSION: ${{ steps.release_version.outputs.RELEASE_VERSION }} env: @@ -79,13 +79,6 @@ jobs: - name: Run all tests, linting and baselines if: ${{ !inputs.skip_code_quality }} run: make pr - - name: Git client setup and refresh tip - run: | - git config user.name "Release bot" - git config user.email "aws-devax-open-source@amazon.com" - git config pull.rebase true - git config remote.origin.url >&- || git remote add origin https://github.com/"${ORIGIN}" # Git Detached mode (release notes) doesn't have origin - git pull origin "${BRANCH}" - name: Bump package version id: versioning run: poetry version "${RELEASE_VERSION}" @@ -105,16 +98,6 @@ jobs: # and also future-proof for when we switch to protected branch and update via PR key: ${{ runner.os }}-${{ env.RELEASE_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - name: Update version in trunk - if: steps.versioning.outcome == 'success' - run: | - HAS_CHANGE=$(git status --porcelain) - test -z "${HAS_CHANGE}" && echo "Nothing to update" && exit 0 - git add pyproject.toml - git commit -m "bump version to ${RELEASE_VERSION}" --no-verify - git pull origin "${BRANCH}" # prevents concurrent branch update failing push - git push origin HEAD:refs/heads/"${BRANCH}" - release: needs: build environment: release @@ -143,12 +126,6 @@ jobs: # with: # repository-url: https://test.pypi.org/legacy/ - changelog: - needs: release - permissions: - contents: write - uses: ./.github/workflows/reusable_publish_changelog.yml - # NOTE: Watch out for the depth limit of 4 nested workflow_calls. 
# publish_layer -> publish_v2_layer -> reusable_deploy_v2_layer_stack -> reusable_update_v2_layer_arn_docs
 
   publish_layer:
@@ -163,6 +140,28 @@
       latest_published_version: ${{ needs.build.outputs.RELEASE_VERSION }}
       pre_release: ${{ inputs.pre_release }}
 
+  bump_version:
+    needs: [build, release]
+    permissions:
+      contents: write # create-pr action creates a temporary branch
+      pull-requests: write # create-pr action creates a PR using the temporary branch
+    runs-on: ubuntu-latest
+    env:
+      RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }}
+    steps:
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
+      - name: Bump package version
+        id: versioning
+        run: poetry version "${RELEASE_VERSION}"
+      - name: Create PR
+        id: create-pr
+        uses: ./.github/actions/create-pr
+        with:
+          files: "pyproject.toml"
+          temp_branch_prefix: "ci-bump"
+          pull_request_title: "chore(ci): bump version to ${{ env.RELEASE_VERSION }}"
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+
   post_release:
     needs: [build, release, publish_layer]
     permissions:
From 5b144524de2c1c512f34aebdcf78535926b49a13 Mon Sep 17 00:00:00 2001
From: Heitor Lessa
Date: Fri, 12 May 2023 16:56:28 +0200
Subject: [PATCH 26/76] chore(ci): update layer ARN docs and create PR during release (#2240)

---
 .../create-pr/create_pr_for_staged_changes.sh |  7 +-
 .github/workflows/publish_v2_layer.yml | 94 +++++++++++++++++--
 .github/workflows/release.yml | 37 ++++++--
 .../reusable_deploy_v2_layer_stack.yml | 17 ++--
 .github/workflows/reusable_publish_docs.yml | 9 +-
 .../reusable_update_v2_layer_arn_docs.yml | 52 ----------
 layer/scripts/update_layer_arn.sh | 2 +-
 7 files changed, 136 insertions(+), 82 deletions(-)
 delete mode 100644 .github/workflows/reusable_update_v2_layer_arn_docs.yml

diff --git a/.github/actions/create-pr/create_pr_for_staged_changes.sh b/.github/actions/create-pr/create_pr_for_staged_changes.sh
index 2f32ab24342..99bcd22b97d 100755
--- a/.github/actions/create-pr/create_pr_for_staged_changes.sh
+++ b/.github/actions/create-pr/create_pr_for_staged_changes.sh
@@ -3,7 +3,7 @@ set -uo pipefail # prevent accessing unset env vars, prevent masking pipeline er
 
 #docs
 #title :create_pr_for_staged_changes.sh
-#description :This script will create a PR for staged changes, detect and close duplicate PRs.
+#description :This script will create a PR for staged changes, detect and close duplicate PRs. All PRs will be omitted from Release Notes and Changelogs
 #author :@heitorlessa
 #date :May 8th 2023
 #version :0.1
@@ -61,6 +61,8 @@ function set_environment_variables() {
     export PR_BODY="This is an automated PR created from the following workflow"
     export FILENAME=".github/scripts/$(basename "$0")"
     export NO_DUPLICATES_MESSAGE="No duplicated PRs found"
+    export SKIP_LABEL="skip-changelog"
+
     end_span
 }
 
@@ -86,7 +88,8 @@ function create_pr() {
 
 function create_pr() {
     start_span "Creating PR against ${BASE_BRANCH} branch"
-    NEW_PR_URL=$(gh pr create --title "${PR_TITLE}" --body "${PR_BODY}: ${WORKFLOW_URL}" --base "${BASE_BRANCH}" || error "Failed to create PR") # e.g., https://github.com/awslabs/aws-lambda-powertools/pull/13
+    # TODO: create label
+    NEW_PR_URL=$(gh pr create --title "${PR_TITLE}" --body "${PR_BODY}: ${WORKFLOW_URL}" --base "${BASE_BRANCH}" --label "${SKIP_LABEL}" || error "Failed to create PR") # e.g., https://github.com/awslabs/aws-lambda-powertools/pull/13
 
     # greedy remove any string until the last URL path, including the last '/'.
https://opensource.com/article/17/6/bash-parameter-expansion
     debug "Extracting PR Number from PR URL: ${NEW_PR_URL}"
diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml
index 7820678e813..dcdc43a6e2c 100644
--- a/.github/workflows/publish_v2_layer.yml
+++ b/.github/workflows/publish_v2_layer.yml
@@ -1,10 +1,5 @@
 name: Deploy v2 layer to all regions
 
-permissions:
-  id-token: write
-  contents: write
-  pages: write
-
 on:
   workflow_dispatch:
     inputs:
@@ -31,7 +26,11 @@ on:
 jobs:
   build-layer:
     permissions:
+      # lower privilege propagated from parent workflow (release.yml)
       contents: read
+      id-token: none
+      pages: none
+      pull-requests: none
     runs-on: aws-lambda-powertools_ubuntu-latest_8-core
     defaults:
       run:
@@ -87,6 +86,12 @@ jobs:
 
   beta:
     needs: build-layer
+    # lower privilege propagated from parent workflow (release.yml)
+    permissions:
+      id-token: write
+      contents: read
+      pages: write # docs will be updated with latest Layer ARNs
+      pull-requests: write # create-pr action will create a PR with Layer ARN updates
     uses: ./.github/workflows/reusable_deploy_v2_layer_stack.yml
     secrets: inherit
     with:
@@ -97,6 +102,12 @@ jobs:
 
   prod:
     needs: beta
+    # lower privilege propagated from parent workflow (release.yml)
+    permissions:
+      id-token: write
+      contents: read
+      pages: write # docs will be updated with latest Layer ARNs
+      pull-requests: write # create-pr action will create a PR with Layer ARN updates
     uses: ./.github/workflows/reusable_deploy_v2_layer_stack.yml
     secrets: inherit
     with:
@@ -107,6 +118,12 @@ jobs:
 
   sar-beta:
     needs: build-layer
+    permissions:
+      # lower privilege propagated from parent workflow (release.yml)
+      id-token: write
+      contents: read
+      pull-requests: none
+      pages: none
     uses: ./.github/workflows/reusable_deploy_v2_sar.yml
     secrets: inherit
     with:
@@ -117,6 +134,12 @@ jobs:
 
   sar-prod:
     needs: [build-layer, sar-beta]
+    permissions:
+      # lower privilege propagated from parent workflow (release.yml)
+      id-token: write
+      contents: read
+      pull-requests: none
+      pages: none
    uses: ./.github/workflows/reusable_deploy_v2_sar.yml
    secrets: inherit
    with:
@@ -125,10 +148,62 @@ jobs:
       environment: "layer-prod"
       package-version: ${{ inputs.latest_published_version }}
 
+  # Updating the documentation with the latest Layer ARNs is a two-phase process
+  #
+  # 1. Update layer ARNs with latest deployed locally and create a PR with these changes
+  # 2.
Pull from temporary branch with these changes and update the docs we're releasing + # + # This keeps our permissions tight and we don't run into a conflict, + # where a new release creates a new doc (2.16.0) while layers are still pointing to 2.15 + # because the PR has to be merged while release process is running + + update_v2_layer_arn_docs: + needs: prod + outputs: + temp_branch: ${{ steps.create-pr.outputs.temp_branch }} + runs-on: ubuntu-latest + permissions: + # lower privilege propagated from parent workflow (release.yml) + contents: write + pull-requests: write + id-token: none + pages: none + steps: + - name: Checkout repository # reusable workflows start clean, so we need to checkout again + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + with: + fetch-depth: 0 + - name: Download CDK layer artifact + uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + with: + name: cdk-layer-stack + path: cdk-layer-stack/ + - name: Replace layer versions in documentation + run: | + ls -la cdk-layer-stack/ + ./layer/scripts/update_layer_arn.sh cdk-layer-stack + # NOTE: It felt unnecessary creating yet another PR to update changelog w/ latest tag + # since this is the only step in the release where we update docs from a temp branch + - name: Update changelog with latest tag + run: make changelog + - name: Create PR + id: create-pr + uses: ./.github/actions/create-pr + with: + files: "docs/index.md examples CHANGELOG.md" + temp_branch_prefix: "ci-layer-docs" + pull_request_title: "chore(ci): layer docs update" + github_token: ${{ secrets.GITHUB_TOKEN }} + + prepare_docs_alias: runs-on: ubuntu-latest permissions: + # lower privilege propagated from parent workflow (release.yml) contents: read + pages: none + id-token: none + pull-requests: none outputs: DOCS_ALIAS: ${{ steps.set-alias.outputs.DOCS_ALIAS }} steps: @@ -141,13 +216,16 @@ jobs: fi echo DOCS_ALIAS="$DOCS_ALIAS" >> "$GITHUB_OUTPUT" - release-docs: - needs: [prod, prepare_docs_alias] + release_docs: + needs: [update_v2_layer_arn_docs, prepare_docs_alias] permissions: + # lower privilege propagated from parent workflow (release.yml) contents: write pages: write + pull-requests: none + id-token: none uses: ./.github/workflows/reusable_publish_docs.yml with: version: ${{ inputs.latest_published_version }} alias: ${{ needs.prepare_docs_alias.outputs.DOCS_ALIAS }} - detached_mode: true + git_ref: ${{ needs.update_v2_layer_arn_docs.outputs.temp_branch }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f062baa01b2..789104dd6db 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -5,12 +5,14 @@ name: Release # === Automated activities === # # 1. Run tests, linting, security and complexity base line -# 2. Bump package version, build release artifact, and generate latest Changelog +# 2. Bump package version and build release artifact # 3. Publish package to PyPi prod repository using cached artifact -# 4. Kick off Layers pipeline to compile and publish latest version -# 5. Updates documentation to use the latest Layer ARN for all commercial regions -# 6. Builds a new user guide and API docs with release version; update /latest pointing to newly released version -# 7. Close all issues labeled "pending-release" and notify customers about the release +# 4. Compile Layer and kick off pipeline for beta, prod, and canary releases +# 5. Update docs with latest Layer ARNs and Changelog +# 6. 
Create PR to update trunk so staged docs also point to the latest Layer ARN, when merged +# 7. Builds a new user guide and API docs with release version; update /latest pointing to newly released version +# 8. Create PR to update package version on trunk +# 9. Close all issues labeled "pending-release" and notify customers about the release # # === Manual activities === # @@ -126,15 +128,36 @@ jobs: # with: # repository-url: https://test.pypi.org/legacy/ + create_tag: + needs: [build, release] + runs-on: ubuntu-latest + permissions: + contents: write + env: + RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }} + steps: + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + - id: setup-git + name: Git client setup and refresh tip + run: | + git config user.name "Powertools bot" + git config user.email "aws-lambda-powertools-feedback@amazon.com" + git config remote.origin.url >&- + - name: Create Git Tag + run: | + git tag -a v"${RELEASE_VERSION}" -m "release_version: v${RELEASE_VERSION}" + git push origin v"${RELEASE_VERSION}" + # NOTE: Watch out for the depth limit of 4 nested workflow_calls. - # publish_layer -> publish_v2_layer -> reusable_deploy_v2_layer_stack -> reusable_update_v2_layer_arn_docs + # publish_layer -> publish_v2_layer -> reusable_deploy_v2_layer_stack publish_layer: - needs: [build, release] + needs: [build, release, create_tag] secrets: inherit permissions: id-token: write contents: write pages: write + pull-requests: write uses: ./.github/workflows/publish_v2_layer.yml with: latest_published_version: ${{ needs.build.outputs.RELEASE_VERSION }} diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml index 912849c2e2c..b1113ff3e40 100644 --- a/.github/workflows/reusable_deploy_v2_layer_stack.yml +++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml @@ -1,9 +1,5 @@ name: Deploy CDK Layer v2 stack -permissions: - id-token: write - contents: write - on: workflow_call: inputs: @@ -28,6 +24,12 @@ jobs: deploy-cdk-stack: runs-on: ubuntu-latest environment: ${{ inputs.environment }} + # lower privilege propagated from parent workflow (publish_v2_layer.yml) + permissions: + id-token: write + pull-requests: none + contents: read + pages: none defaults: run: working-directory: ./layer @@ -149,10 +151,3 @@ jobs: retention-days: 1 - name: CDK Deploy Canary run: npx cdk deploy --app cdk.out --context region=${{ matrix.region }} --parameters DeployStage="${{ inputs.stage }}" --parameters HasARM64Support=${{ matrix.has_arm64_support }} 'CanaryV2Stack' --require-approval never --verbose - - update_v2_layer_arn_docs: - needs: deploy-cdk-stack - if: ${{ inputs.stage == 'PROD' }} - uses: ./.github/workflows/reusable_update_v2_layer_arn_docs.yml - with: - latest_published_version: ${{ inputs.latest_published_version }} diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml index 9359229230f..eb0d8f8598a 100644 --- a/.github/workflows/reusable_publish_docs.yml +++ b/.github/workflows/reusable_publish_docs.yml @@ -1,7 +1,6 @@ name: Reusable publish documentation env: - BRANCH: develop ORIGIN: awslabs/aws-lambda-powertools-python on: @@ -20,6 +19,11 @@ on: required: false default: false type: boolean + git_ref: + description: "Branch or commit ID to checkout from" + required: false + type: string + default: develop permissions: contents: write @@ -36,6 +40,7 @@ jobs: - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: 
fetch-depth: 0 + ref: ${{ inputs.git_ref }} - name: Install poetry run: pipx install poetry - name: Set up Python @@ -56,6 +61,8 @@ jobs: git config pull.rebase true git config remote.origin.url >&- || git remote add origin https://github.com/"$ORIGIN" git pull origin "$BRANCH" + env: + BRANCH: ${{ inputs.git_ref }} - name: Build docs website and API reference env: VERSION: ${{ inputs.version }} diff --git a/.github/workflows/reusable_update_v2_layer_arn_docs.yml b/.github/workflows/reusable_update_v2_layer_arn_docs.yml deleted file mode 100644 index 5fbf6814dcd..00000000000 --- a/.github/workflows/reusable_update_v2_layer_arn_docs.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Update V2 Layer ARN Docs - -on: - workflow_call: - inputs: - latest_published_version: - description: "Latest PyPi published version to rebuild latest docs for, e.g. v2.0.0" - type: string - required: true - -permissions: - contents: write - -env: - BRANCH: develop - -jobs: - publish_v2_layer_arn: - # Force Github action to run only a single job at a time (based on the group name) - # This is to prevent race-condition and inconsistencies with changelog push - concurrency: - group: changelog-build - runs-on: ubuntu-latest - steps: - - name: Checkout repository # reusable workflows start clean, so we need to checkout again - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - with: - fetch-depth: 0 - - name: Git client setup and refresh tip - run: | - git config user.name "Release bot" - git config user.email "aws-devax-open-source@amazon.com" - git config pull.rebase true - git config remote.origin.url >&- || git remote add origin https://github.com/"${origin}" # Git Detached mode (release notes) doesn't have origin - git pull origin "${BRANCH}" - - name: Download CDK layer artifact - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: cdk-layer-stack - path: cdk-layer-stack/ - - name: Replace layer versions in documentation - run: | - ls -la cdk-layer-stack/ - ./layer/scripts/update_layer_arn.sh cdk-layer-stack - - name: Update documentation in trunk - run: | - HAS_CHANGE=$(git status --porcelain) - test -z "${HAS_CHANGE}" && echo "Nothing to update" && exit 0 - git add docs/index.md examples - git commit -m "chore: update v2 layer ARN on documentation" - git pull origin "${BRANCH}" # prevents concurrent branch update failing push - git push origin HEAD:refs/heads/"${BRANCH}" diff --git a/layer/scripts/update_layer_arn.sh b/layer/scripts/update_layer_arn.sh index 0ad3e1617fe..1bbf63c2b88 100755 --- a/layer/scripts/update_layer_arn.sh +++ b/layer/scripts/update_layer_arn.sh @@ -1,6 +1,6 @@ #!/bin/bash -# This script is run during the reusable_update_v2_layer_arn_docs CI job, +# This script is run during the publish_v2_layer.yml CI job, # and it is responsible for replacing the layer ARN in our documentation, # based on the output files generated by CDK when deploying to each pseudo_region. 
# From 1d84bad6f9142a1fb30a00e338125d1b44b6b6d6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 12 May 2023 17:02:51 +0200 Subject: [PATCH 27/76] chore(ci): fail create-pr when branch cannot be created or behind tip --- .github/actions/create-pr/create_pr_for_staged_changes.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/create-pr/create_pr_for_staged_changes.sh b/.github/actions/create-pr/create_pr_for_staged_changes.sh index 99bcd22b97d..9fd1cecd366 100755 --- a/.github/actions/create-pr/create_pr_for_staged_changes.sh +++ b/.github/actions/create-pr/create_pr_for_staged_changes.sh @@ -82,7 +82,7 @@ function create_temporary_branch_with_changes() { echo "$@" | xargs -n1 git add || error "Failed to add staged changes: "$@"" git commit -m "${PR_TITLE}" - git push origin "${TEMP_BRANCH}" + git push origin "${TEMP_BRANCH}" || error "Failed to create new temporary branch" end_span } From 0e8a5cb37aa10d812236252504d896d596e4d597 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 17:04:39 +0200 Subject: [PATCH 28/76] chore(ci): changelog rebuild (#2245) Co-authored-by: Powertools bot --- CHANGELOG.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1193ad22546..e72b824f0dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,22 +11,27 @@ ## Maintenance * **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) +* **ci:** update layer ARN docs and create PR during release ([#2240](https://github.com/awslabs/aws-lambda-powertools-python/issues/2240)) +* **ci:** bump package version after release via pull request ([#2239](https://github.com/awslabs/aws-lambda-powertools-python/issues/2239)) +* **ci:** convert create-pr steps into composite action ([#2238](https://github.com/awslabs/aws-lambda-powertools-python/issues/2238)) +* **ci:** changelog rebuild ([#2241](https://github.com/awslabs/aws-lambda-powertools-python/issues/2241)) * **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) -* **ci:** changelog rebuild ([#2236](https://github.com/awslabs/aws-lambda-powertools-python/issues/2236)) * **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) +* **ci:** changelog rebuild ([#2236](https://github.com/awslabs/aws-lambda-powertools-python/issues/2236)) +* **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) * **ci:** changelog rebuild ([#2232](https://github.com/awslabs/aws-lambda-powertools-python/issues/2232)) * **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) -* **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) +* **ci:** fail create-pr when branch cannot be created or behind tip * **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) * **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) -* **deps-dev:** bump aws-cdk from 2.78.0 to 2.79.0 
([#2235](https://github.com/awslabs/aws-lambda-powertools-python/issues/2235)) +* **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) * **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) * **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) -* **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) * **deps-dev:** bump types-python-dateutil from 2.8.19.12 to 2.8.19.13 ([#2234](https://github.com/awslabs/aws-lambda-powertools-python/issues/2234)) * **deps-dev:** bump mypy from 1.2.0 to 1.3.0 ([#2233](https://github.com/awslabs/aws-lambda-powertools-python/issues/2233)) +* **deps-dev:** bump aws-cdk from 2.78.0 to 2.79.0 ([#2235](https://github.com/awslabs/aws-lambda-powertools-python/issues/2235)) * **deps-dev:** bump aws-cdk from 2.77.0 to 2.78.0 ([#2202](https://github.com/awslabs/aws-lambda-powertools-python/issues/2202)) From a7a63801f8817f93672cd546ab98c4b2f124a587 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 17:10:29 +0200 Subject: [PATCH 29/76] chore(ci): changelog rebuild (#2246) Co-authored-by: Powertools bot --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e72b824f0dc..c67dba2494f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ ## Maintenance * **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) +* **ci:** fail create-pr when branch cannot be created or behind tip * **ci:** update layer ARN docs and create PR during release ([#2240](https://github.com/awslabs/aws-lambda-powertools-python/issues/2240)) * **ci:** bump package version after release via pull request ([#2239](https://github.com/awslabs/aws-lambda-powertools-python/issues/2239)) * **ci:** convert create-pr steps into composite action ([#2238](https://github.com/awslabs/aws-lambda-powertools-python/issues/2238)) @@ -21,7 +22,7 @@ * **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) * **ci:** changelog rebuild ([#2232](https://github.com/awslabs/aws-lambda-powertools-python/issues/2232)) * **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) -* **ci:** fail create-pr when branch cannot be created or behind tip +* **ci:** changelog rebuild ([#2245](https://github.com/awslabs/aws-lambda-powertools-python/issues/2245)) * **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) * **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) * **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) 
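An aside before the next patch: two techniques in the create-pr script introduced earlier are easy to verify in isolation — bash's greedy prefix removal (`${NEW_PR_URL##*/}`) to turn a PR URL into a PR number, and the jq filter used to exclude the newly created PR when searching for duplicates. The sketch below is illustrative only: the URL and PR numbers are made-up sample values, it assumes a standalone `jq` binary is installed, and it feeds the filter a canned payload instead of calling `gh pr list --jq` as the script does.

    #!/bin/bash
    # Standalone sketch; no GitHub API calls are made.
    set -uo pipefail

    # Greedy prefix removal: strip everything up to and including the last '/'
    NEW_PR_URL="https://github.com/awslabs/aws-lambda-powertools/pull/13" # hypothetical PR URL
    NEW_PR_ID="${NEW_PR_URL##*/}"
    echo "PR ID: ${NEW_PR_ID}" # prints: PR ID: 13

    # Same jq expression the script passes to `gh pr list --jq`, here applied
    # to a canned JSON payload standing in for the GitHub search response
    SEARCH_RESULT='[{"number": 13}, {"number": 11}, {"number": 12}]' # hypothetical response
    DUPLICATE_PRS=$(echo "${SEARCH_RESULT}" | jq ".[] | select(.number != ${NEW_PR_ID}) | .number")
    echo "Duplicates to close: ${DUPLICATE_PRS}" # prints 11 and 12, one per line

In CI, passing the filter via `gh --jq` keeps the dependency surface small, since `gh` evaluates the jq syntax itself without requiring a separate jq binary on the runner.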
From ed2b07b6838414f2e63feefa276e4c155b81e0fa Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Fri, 12 May 2023 17:34:30 +0200 Subject: [PATCH 30/76] chore(ci): filter out bot commits from CHANGELOG --- .chglog/CHANGELOG.tpl.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.chglog/CHANGELOG.tpl.md b/.chglog/CHANGELOG.tpl.md index b8919554b80..beb340ad645 100755 --- a/.chglog/CHANGELOG.tpl.md +++ b/.chglog/CHANGELOG.tpl.md @@ -10,7 +10,9 @@ ## {{ .Title }} {{ range .Commits -}} +{{ if and (not (hasPrefix .Subject "changelog rebuild")) (not (hasPrefix .Subject "layer docs update")) (not (hasPrefix .Subject "bump version to")) -}} * {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} +{{ end -}} {{ end }} {{ end -}} {{ end -}} @@ -24,7 +26,9 @@ ## {{ .Title }} {{ range .Commits -}} +{{ if and (not (hasPrefix .Subject "changelog rebuild")) (not (hasPrefix .Subject "layer docs update")) (not (hasPrefix .Subject "bump version to")) -}} * {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} +{{ end -}} {{ end }} {{ end -}} From 8c025b71054e12bdf6da928bb5537e0e3057dbd3 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 17:36:22 +0200 Subject: [PATCH 31/76] chore(ci): changelog rebuild (#2248) Co-authored-by: Powertools bot --- CHANGELOG.md | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c67dba2494f..e47008ec5d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,18 +15,14 @@ * **ci:** update layer ARN docs and create PR during release ([#2240](https://github.com/awslabs/aws-lambda-powertools-python/issues/2240)) * **ci:** bump package version after release via pull request ([#2239](https://github.com/awslabs/aws-lambda-powertools-python/issues/2239)) * **ci:** convert create-pr steps into composite action ([#2238](https://github.com/awslabs/aws-lambda-powertools-python/issues/2238)) -* **ci:** changelog rebuild ([#2241](https://github.com/awslabs/aws-lambda-powertools-python/issues/2241)) * **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) * **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) -* **ci:** changelog rebuild ([#2236](https://github.com/awslabs/aws-lambda-powertools-python/issues/2236)) * **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) -* **ci:** changelog rebuild ([#2232](https://github.com/awslabs/aws-lambda-powertools-python/issues/2232)) -* **ci:** changelog rebuild ([#2230](https://github.com/awslabs/aws-lambda-powertools-python/issues/2230)) -* **ci:** changelog rebuild ([#2245](https://github.com/awslabs/aws-lambda-powertools-python/issues/2245)) * **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) +* **ci:** filter out bot commits from CHANGELOG * **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) -* **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) * **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) +* **deps-dev:** bump 
mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) * **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) @@ -2203,7 +2199,6 @@ * fix dependabot commit messages prefix * fix dependabot unique set config * bump mkdocs-material from 7.1.5 to 7.1.6 ([#451](https://github.com/awslabs/aws-lambda-powertools-python/issues/451)) -* bump version to 1.17.0 * bump boto3 from 1.17.78 to 1.17.84 ([#449](https://github.com/awslabs/aws-lambda-powertools-python/issues/449)) * update mergify to require approval on dependabot ([#456](https://github.com/awslabs/aws-lambda-powertools-python/issues/456)) * bump actions/setup-python from 1 to 2.2.2 ([#445](https://github.com/awslabs/aws-lambda-powertools-python/issues/445)) @@ -2602,7 +2597,6 @@ ## Maintenance -* bump version to 1.9.0 ## Pull Requests @@ -2714,7 +2708,6 @@ ## Maintenance * fix repository URL -* bump version to 1.7.0 * spacing * typo in list * typo on code generation tool @@ -2883,7 +2876,6 @@ ## Maintenance -* bump version to 1.5.0 ([#158](https://github.com/awslabs/aws-lambda-powertools-python/issues/158)) * tiny changes for readability * add debug logging for sqs batch processing * remove middlewares module, moving decorator functionality to base and sqs @@ -2980,7 +2972,6 @@ ## Maintenance -* bump version to 1.3.0 ([#122](https://github.com/awslabs/aws-lambda-powertools-python/issues/122)) @@ -2991,7 +2982,6 @@ ## Maintenance -* bump version to 1.2.0 ([#119](https://github.com/awslabs/aws-lambda-powertools-python/issues/119)) @@ -3022,7 +3012,6 @@ ## Maintenance -* bump version to 1.1.2 * suppress LGTM alert * add autocomplete as unreleased * remove unused stdout fixture @@ -3127,7 +3116,6 @@ ## Maintenance * update CHANGELOG -* bump version to 0.11.0 ([#76](https://github.com/awslabs/aws-lambda-powertools-python/issues/76)) From 825d6a5014841635c04257cf2dc59f976be86e50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 23:39:19 +0100 Subject: [PATCH 32/76] chore(deps-dev): bump aws-cdk from 2.79.0 to 2.79.1 (#2252) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 14 +++++++------- package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/package-lock.json b/package-lock.json index 1d19c8115e2..2660f7c962b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,13 +8,13 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.79.0" + "aws-cdk": "^2.79.1" } }, "node_modules/aws-cdk": { - "version": "2.79.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.0.tgz", - "integrity": "sha512-pVOw5QBbtmSA+PQmGwSR1qbtgC0caC6+Vc6bCZM4aK9nTh7iUXv4MKqXCUfxN6waNgYpweUCb29gxo6DOe99Yw==", + "version": "2.79.1", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.1.tgz", + "integrity": "sha512-N6intzdRFqrHC+O3Apty34RiTev2+bzvRtUbehVd5IyAmTvLsgE/jlhPUIJV2POSAK+bKOV+ZWEp9qMOj1hq8A==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -43,9 +43,9 @@ }, "dependencies": { "aws-cdk": { - "version": 
"2.79.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.0.tgz", - "integrity": "sha512-pVOw5QBbtmSA+PQmGwSR1qbtgC0caC6+Vc6bCZM4aK9nTh7iUXv4MKqXCUfxN6waNgYpweUCb29gxo6DOe99Yw==", + "version": "2.79.1", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.1.tgz", + "integrity": "sha512-N6intzdRFqrHC+O3Apty34RiTev2+bzvRtUbehVd5IyAmTvLsgE/jlhPUIJV2POSAK+bKOV+ZWEp9qMOj1hq8A==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index 68477155422..d461cfb902f 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,6 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.79.0" + "aws-cdk": "^2.79.1" } } From b39e2a4003cd844f7b513ba44deb979c4629e3d8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 23:39:33 +0100 Subject: [PATCH 33/76] chore(deps-dev): bump pytest-xdist from 3.2.1 to 3.3.0 (#2251) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5f24792ab66..1d42e4bf8ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "anyio" @@ -2291,14 +2291,14 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" -version = "3.2.1" +version = "3.3.0" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.2.1.tar.gz", hash = "sha256:1849bd98d8b242b948e472db7478e090bf3361912a8fed87992ed94085f54727"}, - {file = "pytest_xdist-3.2.1-py3-none-any.whl", hash = "sha256:37290d161638a20b672401deef1cba812d110ac27e35d213f091d15b8beb40c9"}, + {file = "pytest-xdist-3.3.0.tar.gz", hash = "sha256:d42c9efb388da35480878ef4b2993704c6cea800c8bafbe85a8cdc461baf0748"}, + {file = "pytest_xdist-3.3.0-py3-none-any.whl", hash = "sha256:76f7683d4f993eaff91c9cb0882de0465c4af9c6dd3debc903833484041edc1a"}, ] [package.dependencies] @@ -3068,7 +3068,7 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] aws-sdk = ["boto3"] parser = ["pydantic"] tracer = ["aws-xray-sdk"] @@ -3077,4 +3077,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "fe686c11217e31bf5fd24895d0224fc3e44c9da4192c6b64d245af1c8033a9cd" +content-hash = "7732853de6d7b5740a0ecafa2d47a53d5251e279402371cc9ae0e6d64ef86254" diff --git a/pyproject.toml b/pyproject.toml index 006dd298516..f7ce635dbc9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ flake8-bugbear = "^23.3.12" mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" retry = "^0.9.2" -pytest-xdist = "^3.2.1" +pytest-xdist = "^3.3.0" aws-cdk-lib = "^2.75.0" "aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" 
"aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" From eb3e795b437f6b8b1de99126380de4b6d4a3311a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 23:46:22 +0100 Subject: [PATCH 34/76] chore(deps-dev): bump mkdocs-material from 9.1.11 to 9.1.12 (#2253) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1d42e4bf8ea..7f9ffbcaae0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1574,14 +1574,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.11" +version = "9.1.12" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.11-py3-none-any.whl", hash = "sha256:fbc86d50ec2cf34d40d5c4365780f290ceedde23f1a0704323b34e7f16b0c0dd"}, - {file = "mkdocs_material-9.1.11.tar.gz", hash = "sha256:f5d473eb79d6640a5e668d4b2ab5b9de5e76ae0a0e2d864112df0cfe9016dc1d"}, + {file = "mkdocs_material-9.1.12-py3-none-any.whl", hash = "sha256:68c57d95d10104179c8c3ce9a88ee9d2322a5145b3d0f1f38ff686253fb5ec98"}, + {file = "mkdocs_material-9.1.12.tar.gz", hash = "sha256:d4ebe9b5031ce63a265c19fb5eab4d27ea4edadb05de206372e831b2b7570fb5"}, ] [package.dependencies] @@ -3077,4 +3077,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "7732853de6d7b5740a0ecafa2d47a53d5251e279402371cc9ae0e6d64ef86254" +content-hash = "80772072960025e2e1c804a7d8cc2cc2afe024aba13a30f16af022a0258072f9" diff --git a/pyproject.toml b/pyproject.toml index f7ce635dbc9..56844398d85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" types-requests = "^2.30.0" typing-extensions = "^4.4.0" -mkdocs-material = "^9.1.11" +mkdocs-material = "^9.1.12" filelock = "^3.12.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.70" From 8944f382b76c86e41a4c8fc514bcc89dc21f413c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 13 May 2023 21:47:21 +0100 Subject: [PATCH 35/76] chore(ci): changelog rebuild (#2258) Co-authored-by: Powertools bot --- CHANGELOG.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e47008ec5d9..4839a50912a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,25 +10,28 @@ ## Maintenance -* **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) +* **ci:** enforce zero trust for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) +* **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) +* **ci:** filter out bot commits from CHANGELOG * **ci:** fail create-pr when branch cannot be created or behind tip * **ci:** update layer ARN docs and create PR during release ([#2240](https://github.com/awslabs/aws-lambda-powertools-python/issues/2240)) * **ci:** bump package version after release via pull request ([#2239](https://github.com/awslabs/aws-lambda-powertools-python/issues/2239)) * **ci:** convert create-pr steps into composite action ([#2238](https://github.com/awslabs/aws-lambda-powertools-python/issues/2238)) -* **ci:** enforce zero trust 
for third party workflows ([#2215](https://github.com/awslabs/aws-lambda-powertools-python/issues/2215)) -* **ci:** remove auto-merge workflow ([#2214](https://github.com/awslabs/aws-lambda-powertools-python/issues/2214)) * **ci:** schedule changelog to rebuild daily at 8am, and on release only ([#2216](https://github.com/awslabs/aws-lambda-powertools-python/issues/2216)) * **ci:** create pull request on changelog update ([#2224](https://github.com/awslabs/aws-lambda-powertools-python/issues/2224)) -* **ci:** filter out bot commits from CHANGELOG +* **ci:** skip analytics on forks ([#2225](https://github.com/awslabs/aws-lambda-powertools-python/issues/2225)) * **deps:** bump zgosalvez/github-actions-ensure-sha-pinned-actions from 2.1.2 to 2.1.3 ([#2227](https://github.com/awslabs/aws-lambda-powertools-python/issues/2227)) +* **deps-dev:** bump mkdocs-material from 9.1.11 to 9.1.12 ([#2253](https://github.com/awslabs/aws-lambda-powertools-python/issues/2253)) * **deps-dev:** bump cfn-lint from 0.77.4 to 0.77.5 ([#2228](https://github.com/awslabs/aws-lambda-powertools-python/issues/2228)) * **deps-dev:** bump mkdocs-material from 9.1.9 to 9.1.11 ([#2229](https://github.com/awslabs/aws-lambda-powertools-python/issues/2229)) +* **deps-dev:** bump types-python-dateutil from 2.8.19.12 to 2.8.19.13 ([#2234](https://github.com/awslabs/aws-lambda-powertools-python/issues/2234)) +* **deps-dev:** bump mypy from 1.2.0 to 1.3.0 ([#2233](https://github.com/awslabs/aws-lambda-powertools-python/issues/2233)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.26.99 to 1.26.127 ([#2219](https://github.com/awslabs/aws-lambda-powertools-python/issues/2219)) * **deps-dev:** bump types-requests from 2.29.0.0 to 2.30.0.0 ([#2220](https://github.com/awslabs/aws-lambda-powertools-python/issues/2220)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.116 to 1.26.127 ([#2218](https://github.com/awslabs/aws-lambda-powertools-python/issues/2218)) -* **deps-dev:** bump types-python-dateutil from 2.8.19.12 to 2.8.19.13 ([#2234](https://github.com/awslabs/aws-lambda-powertools-python/issues/2234)) -* **deps-dev:** bump mypy from 1.2.0 to 1.3.0 ([#2233](https://github.com/awslabs/aws-lambda-powertools-python/issues/2233)) * **deps-dev:** bump aws-cdk from 2.78.0 to 2.79.0 ([#2235](https://github.com/awslabs/aws-lambda-powertools-python/issues/2235)) +* **deps-dev:** bump aws-cdk from 2.79.0 to 2.79.1 ([#2252](https://github.com/awslabs/aws-lambda-powertools-python/issues/2252)) +* **deps-dev:** bump pytest-xdist from 3.2.1 to 3.3.0 ([#2251](https://github.com/awslabs/aws-lambda-powertools-python/issues/2251)) * **deps-dev:** bump aws-cdk from 2.77.0 to 2.78.0 ([#2202](https://github.com/awslabs/aws-lambda-powertools-python/issues/2202)) From 4eaed998570d5bf0ce0df8653a3b607ed0b43d35 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 May 2023 22:30:21 +0100 Subject: [PATCH 36/76] chore(deps): bump codecov/codecov-action from 3.1.3 to 3.1.4 (#2263) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python_build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 9a6706caed9..31769f11b17 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -53,7 +53,7 @@ jobs: - name: Complexity baseline run: make complexity-baseline - name: Upload coverage to Codecov - uses: 
codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # 3.1.3 + uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # 3.1.4 with: file: ./coverage.xml env_vars: PYTHON From 099487e065cc9603bd561fda7892f1a1ca7924d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 May 2023 22:31:02 +0100 Subject: [PATCH 37/76] chore(deps-dev): bump sentry-sdk from 1.22.2 to 1.23.0 (#2264) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7f9ffbcaae0..a29df85f02f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2657,14 +2657,14 @@ pbr = "*" [[package]] name = "sentry-sdk" -version = "1.22.2" +version = "1.23.0" description = "Python client for Sentry (https://sentry.io)" category = "dev" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.22.2.tar.gz", hash = "sha256:5932c092c6e6035584eb74d77064e4bce3b7935dfc4a331349719a40db265840"}, - {file = "sentry_sdk-1.22.2-py2.py3-none-any.whl", hash = "sha256:cf89a5063ef84278d186aceaed6fb595bfe67d099298e537634a323664265669"}, + {file = "sentry-sdk-1.23.0.tar.gz", hash = "sha256:58f4ff9e76c21bc7172eeec9f1bccb3ff2247c74c71d5590438ce36c803f46ea"}, + {file = "sentry_sdk-1.23.0-py2.py3-none-any.whl", hash = "sha256:01b56a276642d31cf9b4aaf0b55938677265d7006be4785a10ef6330d0f5bba9"}, ] [package.dependencies] @@ -2681,10 +2681,11 @@ chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] From ef09c8b81b663ed9be3beb01785609429a2a049f Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 16 May 2023 10:26:32 +0100 Subject: [PATCH 38/76] docs(feature_flags): snippets split, improved, and lint (#2222) Co-authored-by: Ruben Fonseca --- docs/utilities/feature_flags.md | 687 +++++------------- examples/feature_flags/sam/template.yaml | 60 ++ .../src/appconfig_provider_options.py | 45 ++ .../appconfig_provider_options_features.json | 11 + .../appconfig_provider_options_payload.json | 4 + examples/feature_flags/src/beyond_boolean.py | 18 + .../src/beyond_boolean_features.json | 22 + .../src/beyond_boolean_payload.json | 5 + examples/feature_flags/src/conditions.json | 9 + .../src/custom_s3_store_provider.py | 38 + .../feature_flags/src/datetime_feature.py | 29 +- ...me_feature.json => datetime_features.json} | 0 .../feature_flags/src/extracting_envelope.py | 22 + .../src/extracting_envelope_features.json | 11 + .../src/extracting_envelope_payload.json | 4 + .../feature_flags/src/feature_with_rules.json | 32 + .../src/getting_all_enabled_features.py | 42 ++ ...getting_all_enabled_features_features.json | 41 ++ .../getting_all_enabled_features_payload.json | 10 + .../getting_started_single_feature_flag.py | 34 + ..._started_single_feature_flag_features.json | 20 + ...g_started_single_feature_flag_payload.json | 5 + .../src/getting_started_static_flag.py | 24 + .../getting_started_static_flag_features.json | 5 + .../getting_started_static_flag_payload.json | 4 + .../src/getting_started_with_cache.py | 24 + 
.../getting_started_with_cache_features.json | 5 + .../getting_started_with_cache_payload.json | 4 + .../src/getting_started_with_tests.py | 52 ++ .../src/getting_stored_features.py | 10 + .../feature_flags/src/minimal_schema.json | 9 + .../feature_flags/src/timebased_feature.py | 36 +- .../src/timebased_happyhour_feature.py | 28 +- .../src/working_with_own_s3_store_provider.py | 22 + ...g_with_own_s3_store_provider_features.json | 5 + ...ng_with_own_s3_store_provider_payload.json | 4 + 36 files changed, 860 insertions(+), 521 deletions(-) create mode 100644 examples/feature_flags/sam/template.yaml create mode 100644 examples/feature_flags/src/appconfig_provider_options.py create mode 100644 examples/feature_flags/src/appconfig_provider_options_features.json create mode 100644 examples/feature_flags/src/appconfig_provider_options_payload.json create mode 100644 examples/feature_flags/src/beyond_boolean.py create mode 100644 examples/feature_flags/src/beyond_boolean_features.json create mode 100644 examples/feature_flags/src/beyond_boolean_payload.json create mode 100644 examples/feature_flags/src/conditions.json create mode 100644 examples/feature_flags/src/custom_s3_store_provider.py rename examples/feature_flags/src/{datetime_feature.json => datetime_features.json} (100%) create mode 100644 examples/feature_flags/src/extracting_envelope.py create mode 100644 examples/feature_flags/src/extracting_envelope_features.json create mode 100644 examples/feature_flags/src/extracting_envelope_payload.json create mode 100644 examples/feature_flags/src/feature_with_rules.json create mode 100644 examples/feature_flags/src/getting_all_enabled_features.py create mode 100644 examples/feature_flags/src/getting_all_enabled_features_features.json create mode 100644 examples/feature_flags/src/getting_all_enabled_features_payload.json create mode 100644 examples/feature_flags/src/getting_started_single_feature_flag.py create mode 100644 examples/feature_flags/src/getting_started_single_feature_flag_features.json create mode 100644 examples/feature_flags/src/getting_started_single_feature_flag_payload.json create mode 100644 examples/feature_flags/src/getting_started_static_flag.py create mode 100644 examples/feature_flags/src/getting_started_static_flag_features.json create mode 100644 examples/feature_flags/src/getting_started_static_flag_payload.json create mode 100644 examples/feature_flags/src/getting_started_with_cache.py create mode 100644 examples/feature_flags/src/getting_started_with_cache_features.json create mode 100644 examples/feature_flags/src/getting_started_with_cache_payload.json create mode 100644 examples/feature_flags/src/getting_started_with_tests.py create mode 100644 examples/feature_flags/src/getting_stored_features.py create mode 100644 examples/feature_flags/src/minimal_schema.json create mode 100644 examples/feature_flags/src/working_with_own_s3_store_provider.py create mode 100644 examples/feature_flags/src/working_with_own_s3_store_provider_features.json create mode 100644 examples/feature_flags/src/working_with_own_s3_store_provider_payload.json diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index 89393ddd54f..efe41c2f82f 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -6,7 +6,7 @@ description: Utility The feature flags utility provides a simple rule engine to define when one or multiple features should be enabled depending on the input. 
???+ info
-    We currently only support AppConfig using [freeform configuration profile](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-configuration-and-profile.html#appconfig-creating-configuration-and-profile-free-form-configurations).
+    When using `AppConfigStore`, we currently only support AppConfig using [freeform configuration profile](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-configuration-and-profile.html#appconfig-creating-configuration-and-profile-free-form-configurations){target="_blank"}.
 
 ## Terminology
 
@@ -24,94 +24,37 @@ Feature flags are used to modify behaviour without changing the application's co
 
 If you want to learn more about feature flags, their variations and trade-offs, check these articles:
 
-* [Feature Toggles (aka Feature Flags) - Pete Hodgson](https://martinfowler.com/articles/feature-toggles.html)
-* [AWS Lambda Feature Toggles Made Simple - Ran Isenberg](https://isenberg-ran.medium.com/aws-lambda-feature-toggles-made-simple-580b0c444233)
-* [Feature Flags Getting Started - CloudBees](https://www.cloudbees.com/blog/ultimate-feature-flag-guide)
+* [Feature Toggles (aka Feature Flags) - Pete Hodgson](https://martinfowler.com/articles/feature-toggles.html){target="_blank"}
+* [AWS Lambda Feature Toggles Made Simple - Ran Isenberg](https://isenberg-ran.medium.com/aws-lambda-feature-toggles-made-simple-580b0c444233){target="_blank"}
+* [Feature Flags Getting Started - CloudBees](https://www.cloudbees.com/blog/ultimate-feature-flag-guide){target="_blank"}
 
 ???+ note
-    AWS AppConfig requires two API calls to fetch configuration for the first time. You can improve latency by consolidating your feature settings in a single [Configuration](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-configuration-and-profile.html).
+    AWS AppConfig requires two API calls to fetch configuration for the first time. You can improve latency by consolidating your feature settings in a single [Configuration](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-configuration-and-profile.html){target="_blank"}.
 
 ## Key features
 
 * Define simple feature flags to dynamically decide when to enable a feature
 * Fetch one or all feature flags enabled for a given application context
 * Support for static feature flags to simply turn on/off a feature without rules
+* Support for time based feature flags
+* Bring your own Feature Flags Store Provider
 
 ## Getting started
 
 ### IAM Permissions
 
-Your Lambda function IAM Role must have `appconfig:GetLatestConfiguration` and `appconfig:StartConfigurationSession` IAM permissions before using this feature.
+When using the default store `AppConfigStore`, your Lambda function IAM Role must have `appconfig:GetLatestConfiguration` and `appconfig:StartConfigurationSession` IAM permissions before using this feature.
 
 ### Required resources
 
-By default, this utility provides [AWS AppConfig](https://docs.aws.amazon.com/appconfig/latest/userguide/what-is-appconfig.html) as a configuration store.
+By default, this utility provides [AWS AppConfig](https://docs.aws.amazon.com/appconfig/latest/userguide/what-is-appconfig.html){target="_blank"} as a configuration store.
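For context on the two permissions above: the store drives the AppConfig Data API on your behalf. A hedged sketch of roughly what happens under the hood (identifiers are illustrative; you never need to call these yourself):

```python
import json

import boto3

client = boto3.client("appconfigdata")

# appconfig:StartConfigurationSession -- first of the two API calls
session = client.start_configuration_session(
    ApplicationIdentifier="product-catalogue",
    EnvironmentIdentifier="dev",
    ConfigurationProfileIdentifier="features",
)

# appconfig:GetLatestConfiguration -- second call, returns the JSON document
response = client.get_latest_configuration(ConfigurationToken=session["InitialConfigurationToken"])
features = json.loads(response["Configuration"].read())
```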
The following sample infrastructure will be used throughout this documentation: === "template.yaml" ```yaml hl_lines="5 11 18 25 31-50 54" - AWSTemplateFormatVersion: "2010-09-09" - Description: Lambda Powertools for Python Feature flags sample template - Resources: - FeatureStoreApp: - Type: AWS::AppConfig::Application - Properties: - Description: "AppConfig Application for feature toggles" - Name: product-catalogue - - FeatureStoreDevEnv: - Type: AWS::AppConfig::Environment - Properties: - ApplicationId: !Ref FeatureStoreApp - Description: "Development Environment for the App Config Store" - Name: dev - - FeatureStoreConfigProfile: - Type: AWS::AppConfig::ConfigurationProfile - Properties: - ApplicationId: !Ref FeatureStoreApp - Name: features - LocationUri: "hosted" - - HostedConfigVersion: - Type: AWS::AppConfig::HostedConfigurationVersion - Properties: - ApplicationId: !Ref FeatureStoreApp - ConfigurationProfileId: !Ref FeatureStoreConfigProfile - Description: 'A sample hosted configuration version' - Content: | - { - "premium_features": { - "default": false, - "rules": { - "customer tier equals premium": { - "when_match": true, - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - }, - "ten_percent_off_campaign": { - "default": false - } - } - ContentType: 'application/json' - - ConfigDeployment: - Type: AWS::AppConfig::Deployment - Properties: - ApplicationId: !Ref FeatureStoreApp - ConfigurationProfileId: !Ref FeatureStoreConfigProfile - ConfigurationVersion: !Ref HostedConfigVersion - DeploymentStrategyId: "AppConfig.AllAtOnce" - EnvironmentId: !Ref FeatureStoreDevEnv + --8<-- "examples/feature_flags/sam/template.yaml" ``` === "CDK" @@ -187,64 +130,21 @@ The `evaluate` method supports two optional parameters: * **context**: Value to be evaluated against each rule defined for the given feature * **default**: Sentinel value to use in case we experience any issues with our store, or feature doesn't exist -=== "app.py" - - ```python hl_lines="3 9 13 17-19" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) +=== "getting_started_single_feature_flag.py" - def lambda_handler(event, context): - # Get customer's tier from incoming request - ctx = { "tier": event.get("tier", "standard") } - - # Evaluate whether customer's tier has access to premium features - # based on `has_premium_features` rules - has_premium_features: bool = feature_flags.evaluate(name="premium_features", - context=ctx, default=False) - if has_premium_features: - # enable premium features - ... 
+ ```python hl_lines="3 6 8 27 31" + --8<-- "examples/feature_flags/src/getting_started_single_feature_flag.py" ``` -=== "event.json" +=== "getting_started_single_feature_flag_payload.json" ```json hl_lines="3" - { - "username": "lessa", - "tier": "premium", - "basked_id": "random_id" - } + --8<-- "examples/feature_flags/src/getting_started_single_feature_flag_payload.json" ``` -=== "features.json" +=== "getting_started_single_feature_flag_features.json" ```json hl_lines="2 6 9-11" - { - "premium_features": { - "default": false, - "rules": { - "customer tier equals premium": { - "when_match": true, - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - }, - "ten_percent_off_campaign": { - "default": false - } - } + --8<-- "examples/feature_flags/src/getting_started_single_feature_flag_features.json" ``` #### Static flags @@ -253,36 +153,21 @@ We have a static flag named `ten_percent_off_campaign`. Meaning, there are no co In this case, we could omit the `context` parameter and simply evaluate whether we should apply the 10% discount. -=== "app.py" +=== "getting_started_static_flag.py" - ```python hl_lines="12-13" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) - - def lambda_handler(event, context): - apply_discount: bool = feature_flags.evaluate(name="ten_percent_off_campaign", - default=False) + ```python hl_lines="3 8 16" + --8<-- "examples/feature_flags/src/getting_started_static_flag.py" + ``` +=== "getting_started_static_flag_payload.json" - if apply_discount: - # apply 10% discount to product - ... + ```json hl_lines="2-3" + --8<-- "examples/feature_flags/src/getting_started_static_flag_payload.json" ``` -=== "features.json" +=== "getting_started_static_flag_features.json" - ```json hl_lines="2-3" - { - "ten_percent_off_campaign": { - "default": false - } - } + ```json hl_lines="2-4" + --8<-- "examples/feature_flags/src/getting_started_static_flag_features.json" ``` ### Getting all enabled features @@ -291,163 +176,25 @@ As you might have noticed, each `evaluate` call means an API call to the Store a You can use `get_enabled_features` method for scenarios where you need a list of all enabled features according to the input context. -=== "app.py" - - ```python hl_lines="17-20 23" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore +=== "getting_all_enabled_features.py" - app = APIGatewayRestResolver() - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) - - @app.get("/products") - def list_products(): - ctx = { - **app.current_event.headers, - **app.current_event.json_body - } - - # all_features is evaluated to ["geo_customer_campaign", "ten_percent_off_campaign"] - all_features: list[str] = feature_flags.get_enabled_features(context=ctx) - - if "geo_customer_campaign" in all_features: - # apply discounts based on geo - ... - - if "ten_percent_off_campaign" in all_features: - # apply additional 10% for all customers - ... 
- - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="2 9 26" + --8<-- "examples/feature_flags/src/getting_all_enabled_features.py" ``` -=== "event.json" +=== "getting_all_enabled_features_payload.json" ```json hl_lines="2 8" - { - "body": "{\"username\": \"lessa\", \"tier\": \"premium\", \"basked_id\": \"random_id\"}", - "resource": "/products", - "path": "/products", - "httpMethod": "GET", - "isBase64Encoded": false, - "headers": { - "CloudFront-Viewer-Country": "NL" - } - } - ``` -=== "features.json" - - ```json hl_lines="17-18 20 27-29" - { - "premium_features": { - "default": false, - "rules": { - "customer tier equals premium": { - "when_match": true, - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - }, - "ten_percent_off_campaign": { - "default": true - }, - "geo_customer_campaign": { - "default": false, - "rules": { - "customer in temporary discount geo": { - "when_match": true, - "conditions": [ - { - "action": "KEY_IN_VALUE", - "key": "CloudFront-Viewer-Country", - "value": ["NL", "IE", "UK", "PL", "PT"] - } - ] - } - } - } - } + --8<-- "examples/feature_flags/src/getting_all_enabled_features_payload.json" ``` -### Beyond boolean feature flags +=== "getting_all_enabled_features_features.json" -???+ info "When is this useful?" - You might have a list of features to unlock for premium customers, unlock a specific set of features for admin users, etc. - -Feature flags can return any JSON values when `boolean_type` parameter is set to `false`. These can be dictionaries, list, string, integers, etc. - -=== "app.py" - - ```python hl_lines="3 9 13 16 18" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) - - def lambda_handler(event, context): - # Get customer's tier from incoming request - ctx = { "tier": event.get("tier", "standard") } - - # Evaluate `has_premium_features` base don customer's tier - premium_features: list[str] = feature_flags.evaluate(name="premium_features", - context=ctx, default=False) - for feature in premium_features: - # enable premium features - ... - ``` - -=== "event.json" - - ```json hl_lines="3" - { - "username": "lessa", - "tier": "premium", - "basked_id": "random_id" - } - ``` -=== "features.json" - - ```json hl_lines="3-4 7" - { - "premium_features": { - "boolean_type": false, - "default": [], - "rules": { - "customer tier equals premium": { - "when_match": ["no_ads", "no_limits", "chat"], - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - } - } + ```json hl_lines="2 8-12 17-18 20 27-28 30" + --8<-- "examples/feature_flags/src/getting_all_enabled_features_features.json" ``` -#### Time based feature flags +### Time based feature flags Feature flags can also return enabled features based on time or datetime ranges. 
This allows you to have features that are only enabled on certain days of the week, certain time
@@ -461,19 +208,19 @@ Use cases:
 
 You can also have features enabled only at certain times of the day for premium tier customers.
 
-=== "app.py"
+=== "timebased_feature.py"
 
-    ```python hl_lines="12"
+    ```python hl_lines="1 6 40"
     --8<-- "examples/feature_flags/src/timebased_feature.py"
     ```
 
-=== "event.json"
+=== "timebased_feature_event.json"
 
     ```json hl_lines="3"
     --8<-- "examples/feature_flags/src/timebased_feature_event.json"
     ```
 
-=== "features.json"
+=== "timebased_features.json"
 
     ```json hl_lines="9-11 14-21"
     --8<-- "examples/feature_flags/src/timebased_features.json"
@@ -481,40 +228,65 @@ You can also have features enabled only at certain times of the day for premium
 
 You can also have features enabled only at certain times of the day.
 
-=== "app.py"
+=== "timebased_happyhour_feature.py"
 
-    ```python hl_lines="9"
+    ```python hl_lines="1 6 29"
     --8<-- "examples/feature_flags/src/timebased_happyhour_feature.py"
     ```
 
-=== "features.json"
+=== "timebased_happyhour_features.json"
 
-    ```json hl_lines="9-15"
+    ```json hl_lines="9-14"
     --8<-- "examples/feature_flags/src/timebased_happyhour_features.json"
     ```
 
 You can also have features enabled only on specific days, for example: enable a Christmas sale discount during specific dates.
 
-=== "app.py"
+=== "datetime_feature.py"
 
-    ```python hl_lines="10"
+    ```python hl_lines="1 6 31"
     --8<-- "examples/feature_flags/src/datetime_feature.py"
     ```
 
-=== "features.json"
+=== "datetime_features.json"
 
     ```json hl_lines="9-14"
-    --8<-- "examples/feature_flags/src/datetime_feature.json"
+    --8<-- "examples/feature_flags/src/datetime_features.json"
     ```
 
 ???+ info "How should I use timezones?"
-    You can use any [IANA time zone](https://www.iana.org/time-zones) (as originally specified
-    in [PEP 615](https://peps.python.org/pep-0615/)) as part of your rules definition.
+    You can use any [IANA time zone](https://www.iana.org/time-zones){target="_blank"} (as originally specified
+    in [PEP 615](https://peps.python.org/pep-0615/){target="_blank"}) as part of your rules definition.
     Powertools takes care of converting and calculating the correct timestamps for you.
 
     When using `SCHEDULE_BETWEEN_DATETIME_RANGE`, use timestamps without timezone information, and
    specify the timezone manually. This way, you'll avoid hitting problems with daylight saving time.
 
+### Beyond boolean feature flags
+
+???+ info "When is this useful?"
+    You might have a list of features to unlock for premium customers, unlock a specific set of features for admin users, etc.
+
+Feature flags can return any JSON values when `boolean_type` parameter is set to `false`. These can be dictionaries, lists, strings, integers, etc.
+
+=== "beyond_boolean.py"
+
+    ```python hl_lines="3 8 16"
+    --8<-- "examples/feature_flags/src/beyond_boolean.py"
+    ```
+
+=== "beyond_boolean_payload.json"
+
+    ```json hl_lines="3"
+    --8<-- "examples/feature_flags/src/beyond_boolean_payload.json"
+    ```
+
+=== "beyond_boolean_features.json"
+
+    ```json hl_lines="7-11 14-16"
+    --8<-- "examples/feature_flags/src/beyond_boolean_features.json"
+    ```
+
 ## Advanced
 
 ### Adjusting in-memory cache
 
@@ -523,17 +295,21 @@ By default, we cache configuration retrieved from the Store for 5 seconds for pe
 
 You can override `max_age` parameter when instantiating the store. 
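For instance, a hedged sketch that keeps the fetched configuration for five minutes instead of the default five seconds (values are illustrative; see the tabs below for the full example):

```python
from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags

app_config = AppConfigStore(
    environment="dev",
    application="product-catalogue",
    name="features",
    max_age=300,  # seconds to cache the fetched configuration
)
feature_flags = FeatureFlags(store=app_config)
```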
-=== "app.py" +=== "getting_started_with_cache.py" - ```python hl_lines="7" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore + ```python hl_lines="6" + --8<-- "examples/feature_flags/src/getting_started_with_cache.py" + ``` +=== "getting_started_with_cache_payload.json" + + ```json hl_lines="2-3" + --8<-- "examples/feature_flags/src/getting_started_with_cache_payload.json" + ``` - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features", - max_age=300 - ) +=== "getting_started_with_cache_features.json" + + ```json hl_lines="2-4" + --8<-- "examples/feature_flags/src/getting_started_with_cache_features.json" ``` ### Getting fetched configuration @@ -545,21 +321,10 @@ You can override `max_age` parameter when instantiating the store. You can access the configuration fetched from the store via `get_raw_configuration` property within the store instance. -=== "app.py" - - ```python hl_lines="12" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="configuration", - envelope = "feature_flags" - ) - - feature_flags = FeatureFlags(store=app_config) +=== "getting_stored_features.py" - config = app_config.get_raw_configuration + ```python hl_lines="9" + --8<-- "examples/feature_flags/src/getting_stored_features.py" ``` ### Schema @@ -570,17 +335,11 @@ This utility expects a certain schema to be stored as JSON within AWS AppConfig. A feature can simply have its name and a `default` value. This is either on or off, also known as a [static flag](#static-flags). -```json hl_lines="2-3 5-7" title="minimal_schema.json" -{ - "global_feature": { - "default": true - }, - "non_boolean_global_feature": { - "default": {"group": "read-only"}, - "boolean_type": false - }, -} -``` +=== "minimal_schema.json" + + ```json hl_lines="2-3 5-7" + --8<-- "examples/feature_flags/src/minimal_schema.json" + ``` If you need more control and want to provide context such as user group, permissions, location, etc., you need to add rules to your feature flag configuration. @@ -592,40 +351,11 @@ When adding `rules` to a feature, they must contain: 2. `when_match` boolean or JSON value that should be used when conditions match 3. A list of `conditions` for evaluation - ```json hl_lines="4-11 19-26" title="feature_with_rules.json" - { - "premium_feature": { - "default": false, - "rules": { - "customer tier equals premium": { - "when_match": true, - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - }, - "non_boolean_premium_feature": { - "default": [], - "rules": { - "customer tier equals premium": { - "when_match": ["remove_limits", "remove_ads"], - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - } - } - ``` +=== "feature_with_rules.json" + + ```json hl_lines="4-11 19-26" + --8<-- "examples/feature_flags/src/feature_with_rules.json" + ``` You can have multiple rules with different names. The rule engine will return the first result `when_match` of the matching rule configuration, or `default` value when none of the rules apply. @@ -633,18 +363,11 @@ You can have multiple rules with different names. The rule engine will return th The `conditions` block is a list of conditions that contain `action`, `key`, and `value` keys: -```json hl_lines="5-7" title="conditions.json" -{ - ... 
- "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] -} -``` +=== "conditions.json" + + ```json hl_lines="5-7" + --8<-- "examples/feature_flags/src/conditions.json" + ``` The `action` configuration can have the following values, where the expressions **`a`** is the `key` and **`b`** is the `value` above: @@ -667,17 +390,17 @@ The `action` configuration can have the following values, where the expressions | **SCHEDULE_BETWEEN_DAYS_OF_WEEK** | `lambda a, b: day_of_week(a) in b` | ???+ info - The `**key**` and `**value**` will be compared to the input from the `**context**` parameter. + The `key` and `value` will be compared to the input from the `context` parameter. ???+ "Time based keys" For time based keys, we provide a list of predefined keys. These will automatically get converted to the corresponding timestamp on each invocation of your Lambda function. - | Key | Meaning | - | ------------------- | ------------------------------------------------------------------------ | - | CURRENT_TIME | The current time, 24 hour format (HH:mm) | - | CURRENT_DATETIME | The current datetime ([ISO8601](https://en.wikipedia.org/wiki/ISO_8601)) | - | CURRENT_DAY_OF_WEEK | The current day of the week (Monday-Sunday) | + | Key | Meaning | + | ------------------- | ----------------------------------------------------------------------------------------- | + | CURRENT_TIME | The current time, 24 hour format (HH:mm) | + | CURRENT_DATETIME | The current datetime ([ISO8601](https://en.wikipedia.org/wiki/ISO_8601){target="_blank"}) | + | CURRENT_DAY_OF_WEEK | The current day of the week (Monday-Sunday) | If not specified, the timezone used for calculations will be UTC. @@ -695,55 +418,26 @@ There are scenarios where you might want to include feature flags as part of an For this to work, you need to use a JMESPath expression via the `envelope` parameter to extract that key as the feature flags configuration. -=== "app.py" +=== "extracting_envelope.py" ```python hl_lines="7" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore + --8<-- "examples/feature_flags/src/extracting_envelope.py" + ``` + +=== "extracting_envelope_payload.json" - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="configuration", - envelope = "feature_flags" - ) + ```json hl_lines="2-3" + --8<-- "examples/feature_flags/src/extracting_envelope_payload.json" ``` -=== "configuration.json" +=== "extracting_envelope_features.json" ```json hl_lines="6" - { - "logging": { - "level": "INFO", - "sampling_rate": 0.1 - }, - "feature_flags": { - "premium_feature": { - "default": false, - "rules": { - "customer tier equals premium": { - "when_match": true, - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - }, - "feature2": { - "default": false - } - } - } + --8<-- "examples/feature_flags/src/extracting_envelope_features.json" ``` ### Built-in store provider -???+ info - For GA, you'll be able to bring your own store. - #### AppConfig AppConfig store provider fetches any JSON document from AWS AppConfig. @@ -752,45 +446,68 @@ These are the available options for further customization. | Parameter | Default | Description | | -------------------- | ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ | -| **environment** | `""` | AWS AppConfig Environment, e.g. 
`test`                                                                                                                   |
-| **application**      | `""`             | AWS AppConfig Application                                                                                                                               |
-| **name**             | `""`             | AWS AppConfig Configuration name                                                                                                                        |
+| **environment**      | `""`             | AWS AppConfig Environment, e.g. `dev`                                                                                                                   |
+| **application**      | `""`             | AWS AppConfig Application, e.g. `product-catalogue`                                                                                                     |
+| **name**             | `""`             | AWS AppConfig Configuration name, e.g. `features`                                                                                                       |
 | **envelope**         | `None`           | JMESPath expression to use to extract feature flags configuration from AWS AppConfig configuration                                                      |
 | **max_age**          | `5`              | Number of seconds to cache feature flags configuration fetched from AWS AppConfig                                                                       |
 | **sdk_config**       | `None`           | [Botocore Config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html){target="_blank"}                             |
 | **jmespath_options** | `None`           | For advanced use cases when you want to bring your own [JMESPath functions](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"}  |
 | **logger**           | `logging.Logger` | Logger to use for debug. You can optionally supply an instance of Powertools Logger.                                                                    |
 
-```python hl_lines="21-27" title="AppConfigStore sample"
-from botocore.config import Config
+=== "appconfig_provider_options.py"
+
+    ```python hl_lines="9 13-17 20 28-30"
+    --8<-- "examples/feature_flags/src/appconfig_provider_options.py"
+    ```
+
+=== "appconfig_provider_options_payload.json"
+
+    ```json hl_lines="2 3"
+    --8<-- "examples/feature_flags/src/appconfig_provider_options_payload.json"
+    ```
+
+=== "appconfig_provider_options_features.json"
+
+    ```json hl_lines="6-9"
+    --8<-- "examples/feature_flags/src/appconfig_provider_options_features.json"
+    ```
+
+### Create your own store provider
 
-import jmespath
+You can create your own custom FeatureFlags store provider by inheriting the `StoreProvider` class, and implementing both `get_raw_configuration()` and `get_configuration()` methods to retrieve the configuration from your custom store.
 
-from aws_lambda_powertools.utilities.feature_flags import AppConfigStore
+* **`get_raw_configuration()`** – get the raw configuration from the store provider and return the parsed JSON dictionary
+* **`get_configuration()`** – get the configuration from the store provider, parsing it as a JSON dictionary. If an envelope is set, extract the envelope data
 
-boto_config = Config(read_timeout=10, retries={"total_max_attempts": 2})
+Here is an example of implementing a custom store provider using Amazon S3, a popular object storage service.
 
-# Custom JMESPath functions
-class CustomFunctions(jmespath.functions.Functions):
+???+ note
+    This is just one example of how you can create your own store provider. Before creating a custom store provider, carefully evaluate your requirements and consider factors such as performance, scalability, and ease of maintenance. 
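Before the full Amazon S3 example in the tabs below, the bare interface can be sketched as follows (a skeleton under the assumption that your backend stores the schema as JSON; the method names come from `StoreProvider` itself):

```python
from typing import Any, Dict

from aws_lambda_powertools.utilities.feature_flags.base import StoreProvider


class MyStoreProvider(StoreProvider):
    def get_configuration(self) -> Dict[str, Any]:
        # Return the parsed schema; apply any envelope extraction here
        ...

    @property
    def get_raw_configuration(self) -> Dict[str, Any]:
        # Return the configuration exactly as fetched from the backend
        ...
```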
- @jmespath.functions.signature({'types': ['string']}) - def _func_special_decoder(self, s): - return my_custom_decoder_logic(s) +=== "working_with_own_s3_store_provider.py" + ```python hl_lines="3 8 10" + --8<-- "examples/feature_flags/src/working_with_own_s3_store_provider.py" + ``` -custom_jmespath_options = {"custom_functions": CustomFunctions()} +=== "custom_s3_store_provider.py" + ```python hl_lines="33 37" + --8<-- "examples/feature_flags/src/custom_s3_store_provider.py" + ``` -app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="configuration", - max_age=120, - envelope = "features", - sdk_config=boto_config, - jmespath_options=custom_jmespath_options -) -``` +=== "working_with_own_s3_store_provider_payload.json" + + ```json hl_lines="2 3" + --8<-- "examples/feature_flags/src/working_with_own_s3_store_provider_payload.json" + ``` + +=== "working_with_own_s3_store_provider_features.json" + + ```json hl_lines="2-4" + --8<-- "examples/feature_flags/src/working_with_own_s3_store_provider_features.json" + ``` ## Testing your code @@ -801,70 +518,16 @@ You can unit test your feature flags locally and independently without setting u ???+ warning This excerpt relies on `pytest` and `pytest-mock` dependencies. -```python hl_lines="7-9" title="Unit testing feature flags" -from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore, RuleAction - - -def init_feature_flags(mocker, mock_schema, envelope="") -> FeatureFlags: - """Mock AppConfig Store get_configuration method to use mock schema instead""" - - method_to_mock = "aws_lambda_powertools.utilities.feature_flags.AppConfigStore.get_configuration" - mocked_get_conf = mocker.patch(method_to_mock) - mocked_get_conf.return_value = mock_schema - - app_conf_store = AppConfigStore( - environment="test_env", - application="test_app", - name="test_conf_name", - envelope=envelope, - ) - - return FeatureFlags(store=app_conf_store) - - -def test_flags_condition_match(mocker): - # GIVEN - expected_value = True - mocked_app_config_schema = { - "my_feature": { - "default": False, - "rules": { - "tenant id equals 12345": { - "when_match": expected_value, - "conditions": [ - { - "action": RuleAction.EQUALS.value, - "key": "tenant_id", - "value": "12345", - } - ], - } - }, - } - } - - # WHEN - ctx = {"tenant_id": "12345", "username": "a"} - feature_flags = init_feature_flags(mocker=mocker, mock_schema=mocked_app_config_schema) - flag = feature_flags.evaluate(name="my_feature", context=ctx, default=False) - - # THEN - assert flag == expected_value -``` - -## Feature flags vs Parameters vs env vars +=== "Testing your code" + + ```python hl_lines="11-13" + --8<-- "examples/feature_flags/src/getting_started_with_tests.py" + ``` + +## Feature flags vs Parameters vs Env vars | Method | When to use | Requires new deployment on changes | Supported services | | --------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | ----------------------------------------------------- | | **[Environment variables](https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html){target="_blank"}** | Simple configuration that will rarely if ever change, because changing it requires a Lambda function deployment. 
| Yes | Lambda | | **[Parameters utility](parameters.md)** | Access to secrets, or fetch parameters in different formats from AWS System Manager Parameter Store or Amazon DynamoDB. | No | Parameter Store, DynamoDB, Secrets Manager, AppConfig | | **Feature flags utility** | Rule engine to define when one or multiple features should be enabled depending on the input. | No | AppConfig | - -## Deprecation list when GA - -| Breaking change | Recommendation | -| ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `IN` RuleAction | Use `KEY_IN_VALUE` instead | -| `NOT_IN` RuleAction | Use `KEY_NOT_IN_VALUE` instead | -| `get_enabled_features` | Return type changes from `List[str]` to `Dict[str, Any]`. New return will contain a list of features enabled and their values. List of enabled features will be in `enabled_features` key to keep ease of assertion we have in Beta. | -| `boolean_type` Schema | This **might** not be necessary anymore before we go GA. We will return either the `default` value when there are no rules as well as `when_match` value. This will simplify on-boarding if we can keep the same set of validations already offered. | diff --git a/examples/feature_flags/sam/template.yaml b/examples/feature_flags/sam/template.yaml new file mode 100644 index 00000000000..944183975ec --- /dev/null +++ b/examples/feature_flags/sam/template.yaml @@ -0,0 +1,60 @@ +AWSTemplateFormatVersion: "2010-09-09" +Description: Lambda Powertools for Python Feature flags sample template +Resources: + FeatureStoreApp: + Type: AWS::AppConfig::Application + Properties: + Description: "AppConfig Application for feature toggles" + Name: product-catalogue + + FeatureStoreDevEnv: + Type: AWS::AppConfig::Environment + Properties: + ApplicationId: !Ref FeatureStoreApp + Description: "Development Environment for the App Config Store" + Name: dev + + FeatureStoreConfigProfile: + Type: AWS::AppConfig::ConfigurationProfile + Properties: + ApplicationId: !Ref FeatureStoreApp + Name: features + LocationUri: "hosted" + + HostedConfigVersion: + Type: AWS::AppConfig::HostedConfigurationVersion + Properties: + ApplicationId: !Ref FeatureStoreApp + ConfigurationProfileId: !Ref FeatureStoreConfigProfile + Description: 'A sample hosted configuration version' + Content: | + { + "premium_features": { + "default": false, + "rules": { + "customer tier equals premium": { + "when_match": true, + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + } + } + }, + "ten_percent_off_campaign": { + "default": false + } + } + ContentType: 'application/json' + + ConfigDeployment: + Type: AWS::AppConfig::Deployment + Properties: + ApplicationId: !Ref FeatureStoreApp + ConfigurationProfileId: !Ref FeatureStoreConfigProfile + ConfigurationVersion: !Ref HostedConfigVersion + DeploymentStrategyId: "AppConfig.AllAtOnce" + EnvironmentId: !Ref FeatureStoreDevEnv diff --git a/examples/feature_flags/src/appconfig_provider_options.py b/examples/feature_flags/src/appconfig_provider_options.py new file mode 100644 index 00000000000..8a41f651fc9 --- /dev/null +++ b/examples/feature_flags/src/appconfig_provider_options.py @@ -0,0 +1,45 @@ +from typing import Any + +from botocore.config import Config +from jmespath.functions import Functions, signature + +from aws_lambda_powertools.utilities.feature_flags 
import AppConfigStore, FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +boto_config = Config(read_timeout=10, retries={"total_max_attempts": 2}) + + +# Custom JMESPath functions +class CustomFunctions(Functions): + @signature({"types": ["object"]}) + def _func_special_decoder(self, features): + # You can add some logic here + return features + + +custom_jmespath_options = {"custom_functions": CustomFunctions()} + + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="features", + max_age=120, + envelope="special_decoder(features)", # using a custom function defined in CustomFunctions Class + sdk_config=boto_config, + jmespath_options=custom_jmespath_options, +) + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event: dict, context: LambdaContext): + apply_discount: Any = feature_flags.evaluate(name="ten_percent_off_campaign", default=False) + + price: Any = event.get("price") + + if apply_discount: + # apply 10% discount to product + price = price * 0.9 + + return {"price": price} diff --git a/examples/feature_flags/src/appconfig_provider_options_features.json b/examples/feature_flags/src/appconfig_provider_options_features.json new file mode 100644 index 00000000000..a26b0d34e53 --- /dev/null +++ b/examples/feature_flags/src/appconfig_provider_options_features.json @@ -0,0 +1,11 @@ +{ + "logging": { + "level": "INFO", + "sampling_rate": 0.1 + }, + "features": { + "ten_percent_off_campaign": { + "default": true + } + } + } diff --git a/examples/feature_flags/src/appconfig_provider_options_payload.json b/examples/feature_flags/src/appconfig_provider_options_payload.json new file mode 100644 index 00000000000..b2a71282f8e --- /dev/null +++ b/examples/feature_flags/src/appconfig_provider_options_payload.json @@ -0,0 +1,4 @@ +{ + "product": "laptop", + "price": 1000 +} diff --git a/examples/feature_flags/src/beyond_boolean.py b/examples/feature_flags/src/beyond_boolean.py new file mode 100644 index 00000000000..bd5ad021909 --- /dev/null +++ b/examples/feature_flags/src/beyond_boolean.py @@ -0,0 +1,18 @@ +from typing import Any + +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +app_config = AppConfigStore(environment="dev", application="comments", name="config") + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event: dict, context: LambdaContext): + # Get customer's tier from incoming request + ctx = {"tier": event.get("tier", "standard")} + + # Evaluate `has_premium_features` based on customer's tier + premium_features: Any = feature_flags.evaluate(name="premium_features", context=ctx, default=[]) + + return {"Premium features enabled": premium_features} diff --git a/examples/feature_flags/src/beyond_boolean_features.json b/examples/feature_flags/src/beyond_boolean_features.json new file mode 100644 index 00000000000..c48754a15f9 --- /dev/null +++ b/examples/feature_flags/src/beyond_boolean_features.json @@ -0,0 +1,22 @@ +{ + "premium_features": { + "boolean_type": false, + "default": [], + "rules": { + "customer tier equals premium": { + "when_match": [ + "no_ads", + "no_limits", + "chat" + ], + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + } + } + } + } diff --git a/examples/feature_flags/src/beyond_boolean_payload.json b/examples/feature_flags/src/beyond_boolean_payload.json new file mode 100644 index 
00000000000..d63f3bff11a
--- /dev/null
+++ b/examples/feature_flags/src/beyond_boolean_payload.json
@@ -0,0 +1,5 @@
+{
+    "username": "lessa",
+    "tier": "premium",
+    "basked_id": "random_id"
+}
diff --git a/examples/feature_flags/src/conditions.json b/examples/feature_flags/src/conditions.json
new file mode 100644
index 00000000000..30eda640e0f
--- /dev/null
+++ b/examples/feature_flags/src/conditions.json
@@ -0,0 +1,9 @@
+{
+    "conditions": [
+        {
+            "action": "EQUALS",
+            "key": "tier",
+            "value": "premium"
+        }
+    ]
+}
diff --git a/examples/feature_flags/src/custom_s3_store_provider.py b/examples/feature_flags/src/custom_s3_store_provider.py
new file mode 100644
index 00000000000..ea2c8a876be
--- /dev/null
+++ b/examples/feature_flags/src/custom_s3_store_provider.py
@@ -0,0 +1,38 @@
+import json
+from typing import Any, Dict
+
+import boto3
+from botocore.exceptions import ClientError
+
+from aws_lambda_powertools.utilities.feature_flags.base import StoreProvider
+from aws_lambda_powertools.utilities.feature_flags.exceptions import (
+    ConfigurationStoreError,
+)
+
+
+class S3StoreProvider(StoreProvider):
+    def __init__(self, bucket_name: str, object_key: str):
+        # Initialize the client to your custom store provider
+
+        super().__init__()
+
+        self.bucket_name = bucket_name
+        self.object_key = object_key
+        self.client = boto3.client("s3")
+
+    def _get_s3_object(self) -> Dict[str, Any]:
+        # Retrieve the object content
+        parameters = {"Bucket": self.bucket_name, "Key": self.object_key}
+
+        try:
+            response = self.client.get_object(**parameters)
+            return json.loads(response["Body"].read().decode())
+        except ClientError as exc:
+            raise ConfigurationStoreError("Unable to get S3 Store Provider configuration file") from exc
+
+    def get_configuration(self) -> Dict[str, Any]:
+        return self._get_s3_object()
+
+    @property
+    def get_raw_configuration(self) -> Dict[str, Any]:
+        return self._get_s3_object()
diff --git a/examples/feature_flags/src/datetime_feature.py b/examples/feature_flags/src/datetime_feature.py
index 55c11ea6e7d..7dff14b8008 100644
--- a/examples/feature_flags/src/datetime_feature.py
+++ b/examples/feature_flags/src/datetime_feature.py
@@ -1,14 +1,37 @@
 from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags
+from aws_lambda_powertools.utilities.typing import LambdaContext
 
 app_config = AppConfigStore(environment="dev", application="product-catalogue", name="features")
 
 feature_flags = FeatureFlags(store=app_config)
 
 
-def lambda_handler(event, context):
-    # Get customer's tier from incoming request
+def lambda_handler(event: dict, context: LambdaContext):
+    """
+    This feature flag is enabled under the following conditions:
+    - Start date: December 25th, 2022 at 12:00:00 PM EST
+    - End date: December 31st, 2022 at 11:59:59 PM EST
+    - Timezone: America/New_York
+
+    Rule condition to be evaluated:
+        "conditions": [
+            {
+                "action": "SCHEDULE_BETWEEN_DATETIME_RANGE",
+                "key": "CURRENT_DATETIME",
+                "value": {
+                    "START": "2022-12-25T12:00:00",
+                    "END": "2022-12-31T23:59:59",
+                    "TIMEZONE": "America/New_York"
+                }
+            }
+        ]
+    """
+
+    # Checking if the Christmas discount is enabled
     xmas_discount = feature_flags.evaluate(name="christmas_discount", default=False)
 
     if xmas_discount:
         # Enable special discount on Christmas:
-        pass
+        return {"message": "The Christmas discount is enabled."}
+
+    return {"message": "The Christmas discount is not enabled."}
diff --git a/examples/feature_flags/src/datetime_feature.json b/examples/feature_flags/src/datetime_features.json
similarity index 
100% rename from examples/feature_flags/src/datetime_feature.json rename to examples/feature_flags/src/datetime_features.json diff --git a/examples/feature_flags/src/extracting_envelope.py b/examples/feature_flags/src/extracting_envelope.py new file mode 100644 index 00000000000..3c3194c0c1a --- /dev/null +++ b/examples/feature_flags/src/extracting_envelope.py @@ -0,0 +1,22 @@ +from typing import Any + +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +app_config = AppConfigStore( + environment="dev", application="product-catalogue", name="features", envelope="feature_flags" +) + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event: dict, context: LambdaContext): + apply_discount: Any = feature_flags.evaluate(name="ten_percent_off_campaign", default=False) + + price: Any = event.get("price") + + if apply_discount: + # apply 10% discount to product + price = price * 0.9 + + return {"price": price} diff --git a/examples/feature_flags/src/extracting_envelope_features.json b/examples/feature_flags/src/extracting_envelope_features.json new file mode 100644 index 00000000000..a26b0d34e53 --- /dev/null +++ b/examples/feature_flags/src/extracting_envelope_features.json @@ -0,0 +1,11 @@ +{ + "logging": { + "level": "INFO", + "sampling_rate": 0.1 + }, + "features": { + "ten_percent_off_campaign": { + "default": true + } + } + } diff --git a/examples/feature_flags/src/extracting_envelope_payload.json b/examples/feature_flags/src/extracting_envelope_payload.json new file mode 100644 index 00000000000..b2a71282f8e --- /dev/null +++ b/examples/feature_flags/src/extracting_envelope_payload.json @@ -0,0 +1,4 @@ +{ + "product": "laptop", + "price": 1000 +} diff --git a/examples/feature_flags/src/feature_with_rules.json b/examples/feature_flags/src/feature_with_rules.json new file mode 100644 index 00000000000..60765ebd59b --- /dev/null +++ b/examples/feature_flags/src/feature_with_rules.json @@ -0,0 +1,32 @@ +{ + "premium_feature": { + "default": false, + "rules": { + "customer tier equals premium": { + "when_match": true, + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + } + } + }, + "non_boolean_premium_feature": { + "default": [], + "rules": { + "customer tier equals premium": { + "when_match": ["remove_limits", "remove_ads"], + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + } + } + } +} diff --git a/examples/feature_flags/src/getting_all_enabled_features.py b/examples/feature_flags/src/getting_all_enabled_features.py new file mode 100644 index 00000000000..6e3cab50b0d --- /dev/null +++ b/examples/feature_flags/src/getting_all_enabled_features.py @@ -0,0 +1,42 @@ +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +app = APIGatewayRestResolver() + +app_config = AppConfigStore(environment="dev", application="product-catalogue", name="features") + +feature_flags = FeatureFlags(store=app_config) + + +@app.get("/products") +def list_products(): + # getting fields from request + # https://awslabs.github.io/aws-lambda-powertools-python/latest/core/event_handler/api_gateway/#accessing-request-details + json_body = app.current_event.json_body + headers = app.current_event.headers + + ctx = {**headers, **json_body} + + # 
getting price from payload + price: float = float(json_body.get("price")) + percent_discount: int = 0 + + # all_features is evaluated to ["premium_features", "geo_customer_campaign", "ten_percent_off_campaign"] + all_features: list[str] = feature_flags.get_enabled_features(context=ctx) + + if "geo_customer_campaign" in all_features: + # apply 20% discounts for customers in NL + percent_discount += 20 + + if "ten_percent_off_campaign" in all_features: + # apply additional 10% for all customers + percent_discount += 10 + + price = price * (100 - percent_discount) / 100 + + return {"price": price} + + +def lambda_handler(event: dict, context: LambdaContext): + return app.resolve(event, context) diff --git a/examples/feature_flags/src/getting_all_enabled_features_features.json b/examples/feature_flags/src/getting_all_enabled_features_features.json new file mode 100644 index 00000000000..1017b872dfb --- /dev/null +++ b/examples/feature_flags/src/getting_all_enabled_features_features.json @@ -0,0 +1,41 @@ +{ + "premium_features": { + "default": false, + "rules": { + "customer tier equals premium": { + "when_match": true, + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + } + } + }, + "ten_percent_off_campaign": { + "default": true + }, + "geo_customer_campaign": { + "default": false, + "rules": { + "customer in temporary discount geo": { + "when_match": true, + "conditions": [ + { + "action": "KEY_IN_VALUE", + "key": "CloudFront-Viewer-Country", + "value": [ + "NL", + "IE", + "UK", + "PL", + "PT" + ] + } + ] + } + } + } + } diff --git a/examples/feature_flags/src/getting_all_enabled_features_payload.json b/examples/feature_flags/src/getting_all_enabled_features_payload.json new file mode 100644 index 00000000000..cb0a41847e3 --- /dev/null +++ b/examples/feature_flags/src/getting_all_enabled_features_payload.json @@ -0,0 +1,10 @@ +{ + "body": "{\"username\": \"lessa\", \"tier\": \"premium\", \"basked_id\": \"random_id\", \"price\": 1000}", + "resource": "/products", + "path": "/products", + "httpMethod": "GET", + "isBase64Encoded": false, + "headers": { + "CloudFront-Viewer-Country": "NL" + } +} diff --git a/examples/feature_flags/src/getting_started_single_feature_flag.py b/examples/feature_flags/src/getting_started_single_feature_flag.py new file mode 100644 index 00000000000..a3d54324766 --- /dev/null +++ b/examples/feature_flags/src/getting_started_single_feature_flag.py @@ -0,0 +1,34 @@ +from typing import Any + +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +app_config = AppConfigStore(environment="dev", application="product-catalogue", name="features") + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event: dict, context: LambdaContext): + """ + This feature flag is enabled under the following conditions: + - The request payload contains a field 'tier' with the value 'premium'. + + Rule condition to be evaluated: + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + """ + + # Get customer's tier from incoming request + ctx = {"tier": event.get("tier", "standard")} + + # Evaluate whether customer's tier has access to premium features + # based on `has_premium_features` rules + has_premium_features: Any = feature_flags.evaluate(name="premium_features", context=ctx, default=False) + if has_premium_features: + # enable premium features + ... 
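To exercise the handler above outside Lambda, a hedged local sketch (this assumes AWS credentials plus the AppConfig resources from the SAM template exist, and that the module name mirrors the example file above):

```python
from getting_started_single_feature_flag import lambda_handler

# The handler never reads the context argument, so None stands in for it here
event = {"username": "lessa", "tier": "premium", "basked_id": "random_id"}
lambda_handler(event, context=None)
```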
diff --git a/examples/feature_flags/src/getting_started_single_feature_flag_features.json b/examples/feature_flags/src/getting_started_single_feature_flag_features.json new file mode 100644 index 00000000000..8f7a7615db3 --- /dev/null +++ b/examples/feature_flags/src/getting_started_single_feature_flag_features.json @@ -0,0 +1,20 @@ +{ + "premium_features": { + "default": false, + "rules": { + "customer tier equals premium": { + "when_match": true, + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + } + } + }, + "ten_percent_off_campaign": { + "default": false + } +} diff --git a/examples/feature_flags/src/getting_started_single_feature_flag_payload.json b/examples/feature_flags/src/getting_started_single_feature_flag_payload.json new file mode 100644 index 00000000000..d63f3bff11a --- /dev/null +++ b/examples/feature_flags/src/getting_started_single_feature_flag_payload.json @@ -0,0 +1,5 @@ +{ + "username": "lessa", + "tier": "premium", + "basked_id": "random_id" +} diff --git a/examples/feature_flags/src/getting_started_static_flag.py b/examples/feature_flags/src/getting_started_static_flag.py new file mode 100644 index 00000000000..5d8c185cf2d --- /dev/null +++ b/examples/feature_flags/src/getting_started_static_flag.py @@ -0,0 +1,24 @@ +from typing import Any + +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +app_config = AppConfigStore(environment="dev", application="product-catalogue", name="features") + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event: dict, context: LambdaContext): + """ + This feature flag is enabled by default for all requests. + """ + + apply_discount: Any = feature_flags.evaluate(name="ten_percent_off_campaign", default=False) + + price: Any = event.get("price") + + if apply_discount: + # apply 10% discount to product + price = price * 0.9 + + return {"price": price} diff --git a/examples/feature_flags/src/getting_started_static_flag_features.json b/examples/feature_flags/src/getting_started_static_flag_features.json new file mode 100644 index 00000000000..fe692cdf0c3 --- /dev/null +++ b/examples/feature_flags/src/getting_started_static_flag_features.json @@ -0,0 +1,5 @@ +{ + "ten_percent_off_campaign": { + "default": true + } +} diff --git a/examples/feature_flags/src/getting_started_static_flag_payload.json b/examples/feature_flags/src/getting_started_static_flag_payload.json new file mode 100644 index 00000000000..b2a71282f8e --- /dev/null +++ b/examples/feature_flags/src/getting_started_static_flag_payload.json @@ -0,0 +1,4 @@ +{ + "product": "laptop", + "price": 1000 +} diff --git a/examples/feature_flags/src/getting_started_with_cache.py b/examples/feature_flags/src/getting_started_with_cache.py new file mode 100644 index 00000000000..1437c7266be --- /dev/null +++ b/examples/feature_flags/src/getting_started_with_cache.py @@ -0,0 +1,24 @@ +from typing import Any + +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +app_config = AppConfigStore(environment="dev", application="product-catalogue", name="features", max_age=300) + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event: dict, context: LambdaContext): + """ + This feature flag is enabled by default for all requests. 
+ """ + + apply_discount: Any = feature_flags.evaluate(name="ten_percent_off_campaign", default=False) + + price: Any = event.get("price") + + if apply_discount: + # apply 10% discount to product + price = price * 0.9 + + return {"price": price} diff --git a/examples/feature_flags/src/getting_started_with_cache_features.json b/examples/feature_flags/src/getting_started_with_cache_features.json new file mode 100644 index 00000000000..fe692cdf0c3 --- /dev/null +++ b/examples/feature_flags/src/getting_started_with_cache_features.json @@ -0,0 +1,5 @@ +{ + "ten_percent_off_campaign": { + "default": true + } +} diff --git a/examples/feature_flags/src/getting_started_with_cache_payload.json b/examples/feature_flags/src/getting_started_with_cache_payload.json new file mode 100644 index 00000000000..b2a71282f8e --- /dev/null +++ b/examples/feature_flags/src/getting_started_with_cache_payload.json @@ -0,0 +1,4 @@ +{ + "product": "laptop", + "price": 1000 +} diff --git a/examples/feature_flags/src/getting_started_with_tests.py b/examples/feature_flags/src/getting_started_with_tests.py new file mode 100644 index 00000000000..81152dca104 --- /dev/null +++ b/examples/feature_flags/src/getting_started_with_tests.py @@ -0,0 +1,52 @@ +from aws_lambda_powertools.utilities.feature_flags import ( + AppConfigStore, + FeatureFlags, + RuleAction, +) + + +def init_feature_flags(mocker, mock_schema, envelope="") -> FeatureFlags: + """Mock AppConfig Store get_configuration method to use mock schema instead""" + + method_to_mock = "aws_lambda_powertools.utilities.feature_flags.AppConfigStore.get_configuration" + mocked_get_conf = mocker.patch(method_to_mock) + mocked_get_conf.return_value = mock_schema + + app_conf_store = AppConfigStore( + environment="test_env", + application="test_app", + name="test_conf_name", + envelope=envelope, + ) + + return FeatureFlags(store=app_conf_store) + + +def test_flags_condition_match(mocker): + # GIVEN + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": False, + "rules": { + "tenant id equals 12345": { + "when_match": expected_value, + "conditions": [ + { + "action": RuleAction.EQUALS.value, + "key": "tenant_id", + "value": "12345", + } + ], + } + }, + } + } + + # WHEN + ctx = {"tenant_id": "12345", "username": "a"} + feature_flags = init_feature_flags(mocker=mocker, mock_schema=mocked_app_config_schema) + flag = feature_flags.evaluate(name="my_feature", context=ctx, default=False) + + # THEN + assert flag == expected_value diff --git a/examples/feature_flags/src/getting_stored_features.py b/examples/feature_flags/src/getting_stored_features.py new file mode 100644 index 00000000000..07f115375a6 --- /dev/null +++ b/examples/feature_flags/src/getting_stored_features.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags + +app_config = AppConfigStore( + environment="dev", application="product-catalogue", name="configuration", envelope="feature_flags" +) + +feature_flags = FeatureFlags(store=app_config) + +config = app_config.get_raw_configuration +... 
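One nuance worth noting about `getting_stored_features.py` above: `get_raw_configuration` is a property (no parentheses) and returns the whole fetched document, while `get_configuration()` applies the JMESPath envelope. A hedged sketch of the difference (keys are illustrative):

```python
from aws_lambda_powertools.utilities.feature_flags import AppConfigStore

app_config = AppConfigStore(
    environment="dev", application="product-catalogue", name="configuration", envelope="feature_flags"
)

whole_document = app_config.get_raw_configuration  # includes keys outside the envelope, e.g. "logging"
feature_schema = app_config.get_configuration()  # only the "feature_flags" envelope the rule engine uses
```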
diff --git a/examples/feature_flags/src/minimal_schema.json b/examples/feature_flags/src/minimal_schema.json
new file mode 100644
index 00000000000..7302ab2784a
--- /dev/null
+++ b/examples/feature_flags/src/minimal_schema.json
@@ -0,0 +1,9 @@
+{
+    "global_feature": {
+      "default": true
+    },
+    "non_boolean_global_feature": {
+      "default": {"group": "read-only"},
+      "boolean_type": false
+    }
+}
diff --git a/examples/feature_flags/src/timebased_feature.py b/examples/feature_flags/src/timebased_feature.py
index 0b0963489f4..46fbbc1c3d5 100644
--- a/examples/feature_flags/src/timebased_feature.py
+++ b/examples/feature_flags/src/timebased_feature.py
@@ -1,16 +1,46 @@
 from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags
+from aws_lambda_powertools.utilities.typing import LambdaContext
 
 app_config = AppConfigStore(environment="dev", application="product-catalogue", name="features")
 
 feature_flags = FeatureFlags(store=app_config)
 
 
-def lambda_handler(event, context):
+def lambda_handler(event: dict, context: LambdaContext):
+    """
+    This feature flag is enabled under the following conditions:
+    - The request payload contains a field 'tier' with the value 'premium'.
+    - The current day is either Saturday or Sunday in the America/New_York timezone.
+
+    Rule condition to be evaluated:
+        "conditions": [
+            {
+                "action": "EQUALS",
+                "key": "tier",
+                "value": "premium"
+            },
+            {
+                "action": "SCHEDULE_BETWEEN_DAYS_OF_WEEK",
+                "key": "CURRENT_DAY_OF_WEEK",
+                "value": {
+                    "DAYS": [
+                        "SATURDAY",
+                        "SUNDAY"
+                    ],
+                    "TIMEZONE": "America/New_York"
+                }
+            }
+        ]
+    """
+
     # Get customer's tier from incoming request
     ctx = {"tier": event.get("tier", "standard")}
 
+    # Checking if the weekend premium discount is enabled
     weekend_premium_discount = feature_flags.evaluate(name="weekend_premium_discount", default=False, context=ctx)
 
     if weekend_premium_discount:
-        # Enable special discount for premium members on weekends
-        pass
+        # Enable special discount on weekends for premium users:
+        return {"message": "The weekend premium discount is enabled."}
+
+    return {"message": "The weekend premium discount is not enabled."}
diff --git a/examples/feature_flags/src/timebased_happyhour_feature.py b/examples/feature_flags/src/timebased_happyhour_feature.py
index b008481c722..8b71062bdff 100644
--- a/examples/feature_flags/src/timebased_happyhour_feature.py
+++ b/examples/feature_flags/src/timebased_happyhour_feature.py
@@ -1,13 +1,35 @@
 from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags
+from aws_lambda_powertools.utilities.typing import LambdaContext
 
 app_config = AppConfigStore(environment="dev", application="product-catalogue", name="features")
 
 feature_flags = FeatureFlags(store=app_config)
 
 
-def lambda_handler(event, context):
+def lambda_handler(event: dict, context: LambdaContext):
+    """
+    This feature flag is enabled under the following conditions:
+    - Every day between 17:00 and 19:00 in the Europe/Copenhagen timezone
+
+    Rule condition to be evaluated:
+        "conditions": [
+            {
+                "action": "SCHEDULE_BETWEEN_TIME_RANGE",
+                "key": "CURRENT_TIME",
+                "value": {
+                    "START": "17:00",
+                    "END": "19:00",
+                    "TIMEZONE": "Europe/Copenhagen"
+                }
+            }
+        ]
+    """
+
+    # Checking if the happy hour discount is enabled
     is_happy_hour = feature_flags.evaluate(name="happy_hour", default=False)
 
     if is_happy_hour:
-        # Apply special discount
-        pass
+        # Enable special discount during happy hour:
+        return {"message": "The happy hour discount is enabled."}
+
+    return {"message": "The happy hour discount is not 
enabled."} diff --git a/examples/feature_flags/src/working_with_own_s3_store_provider.py b/examples/feature_flags/src/working_with_own_s3_store_provider.py new file mode 100644 index 00000000000..ad7488388a4 --- /dev/null +++ b/examples/feature_flags/src/working_with_own_s3_store_provider.py @@ -0,0 +1,22 @@ +from typing import Any + +from custom_s3_store_provider import S3StoreProvider + +from aws_lambda_powertools.utilities.feature_flags import FeatureFlags +from aws_lambda_powertools.utilities.typing import LambdaContext + +s3_config_store = S3StoreProvider("your-bucket-name", "working_with_own_s3_store_provider_features.json") + +feature_flags = FeatureFlags(store=s3_config_store) + + +def lambda_handler(event: dict, context: LambdaContext): + apply_discount: Any = feature_flags.evaluate(name="ten_percent_off_campaign", default=False) + + price: Any = event.get("price") + + if apply_discount: + # apply 10% discount to product + price = price * 0.9 + + return {"price": price} diff --git a/examples/feature_flags/src/working_with_own_s3_store_provider_features.json b/examples/feature_flags/src/working_with_own_s3_store_provider_features.json new file mode 100644 index 00000000000..fe692cdf0c3 --- /dev/null +++ b/examples/feature_flags/src/working_with_own_s3_store_provider_features.json @@ -0,0 +1,5 @@ +{ + "ten_percent_off_campaign": { + "default": true + } +} diff --git a/examples/feature_flags/src/working_with_own_s3_store_provider_payload.json b/examples/feature_flags/src/working_with_own_s3_store_provider_payload.json new file mode 100644 index 00000000000..b2a71282f8e --- /dev/null +++ b/examples/feature_flags/src/working_with_own_s3_store_provider_payload.json @@ -0,0 +1,4 @@ +{ + "product": "laptop", + "price": 1000 +} From 961cbe2f7fef1be999840ad70b66b727b505ba23 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 May 2023 12:18:36 +0100 Subject: [PATCH 39/76] chore(deps): bump pymdown-extensions from 9.11 to 10.0 (#2262) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index a29df85f02f..1de16cbac05 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2136,14 +2136,14 @@ files = [ [[package]] name = "pymdown-extensions" -version = "9.11" +version = "10.0" description = "Extension pack for Python Markdown." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pymdown_extensions-9.11-py3-none-any.whl", hash = "sha256:a499191d8d869f30339de86fcf072a787e86c42b6f16f280f5c2cf174182b7f3"}, - {file = "pymdown_extensions-9.11.tar.gz", hash = "sha256:f7e86c1d3981f23d9dc43294488ecb54abadd05b0be4bf8f0e15efc90f7853ff"}, + {file = "pymdown_extensions-10.0-py3-none-any.whl", hash = "sha256:e6cbe8ace7d8feda30bc4fd6a21a073893a9a0e90c373e92d69ce5b653051f55"}, + {file = "pymdown_extensions-10.0.tar.gz", hash = "sha256:9a77955e63528c2ee98073a1fb3207c1a45607bc74a34ef21acd098f46c3aa8a"}, ] [package.dependencies] From 97748a696bc5ab46a14328289bb47b1f9b5f6e80 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 16 May 2023 12:20:27 +0100 Subject: [PATCH 40/76] chore(deps): update mkdocs configuration to support pymdown-extensions 10.0 (#2271) --- mkdocs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/mkdocs.yml b/mkdocs.yml index f0a0f3b9681..2880881af91 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -75,6 +75,7 @@ markdown_extensions: - pymdownx.snippets: base_path: "." check_paths: true + restrict_base_path: false - meta - toc: permalink: true From eb78e651938ad1a8b50e0d55b13688e40eb44ed2 Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Tue, 16 May 2023 14:54:53 +0200 Subject: [PATCH 41/76] chore(ci): add more permissions to analytics --- .github/workflows/dispatch_analytics.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/dispatch_analytics.yml b/.github/workflows/dispatch_analytics.yml index 0b5a19e0408..d307ef62326 100644 --- a/.github/workflows/dispatch_analytics.yml +++ b/.github/workflows/dispatch_analytics.yml @@ -4,13 +4,13 @@ on: workflow_dispatch: schedule: - - cron: '0 * * * *' + - cron: "0 * * * *" permissions: id-token: write actions: read checks: read - contents: read + contents: write deployments: read issues: read discussions: read From c9c02d302df9717805f0ea0c514d3d932ebb1f90 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 16 May 2023 14:11:18 +0100 Subject: [PATCH 42/76] chore(governance): Fix python version in issue templates (#2275) --- .github/ISSUE_TEMPLATE/bug_report.yml | 8 ++++---- .github/ISSUE_TEMPLATE/static_typing.yml | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index e3dd2c17667..f9fa23ae952 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -58,10 +58,10 @@ body: attributes: label: AWS Lambda function runtime options: - - 3.7 - - 3.8 - - 3.9 - - 3.10 + - "3.7" + - "3.8" + - "3.9" + - "3.10" validations: required: true - type: dropdown diff --git a/.github/ISSUE_TEMPLATE/static_typing.yml b/.github/ISSUE_TEMPLATE/static_typing.yml index 3bd302e7e1c..60f216d3b92 100644 --- a/.github/ISSUE_TEMPLATE/static_typing.yml +++ b/.github/ISSUE_TEMPLATE/static_typing.yml @@ -25,10 +25,10 @@ body: attributes: label: AWS Lambda function runtime options: - - 3.7 - - 3.8 - - 3.9 - - 3.10 + - "3.7" + - "3.8" + - "3.9" + - "3.10" validations: required: true - type: input From 7bb0ce49c30fb7d8547b52874be1f0fcd88f42ba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 May 2023 22:41:40 +0100 Subject: [PATCH 43/76] chore(deps-dev): bump mkdocs-material from 9.1.12 to 9.1.13 (#2280) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 
pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1de16cbac05..c0b6a41b0df 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1574,14 +1574,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.12" +version = "9.1.13" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.12-py3-none-any.whl", hash = "sha256:68c57d95d10104179c8c3ce9a88ee9d2322a5145b3d0f1f38ff686253fb5ec98"}, - {file = "mkdocs_material-9.1.12.tar.gz", hash = "sha256:d4ebe9b5031ce63a265c19fb5eab4d27ea4edadb05de206372e831b2b7570fb5"}, + {file = "mkdocs_material-9.1.13-py3-none-any.whl", hash = "sha256:5705cf8cb6c47c747606bd914bb6c01993ff141295cd259475559a1f09f07d5d"}, + {file = "mkdocs_material-9.1.13.tar.gz", hash = "sha256:9102e7604d73e507021847601b0a8b4fe9035422788390183f464fa3b30dd508"}, ] [package.dependencies] @@ -3078,4 +3078,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "80772072960025e2e1c804a7d8cc2cc2afe024aba13a30f16af022a0258072f9" +content-hash = "dfc9c92c89b3408bdb822fe64cb28eb5a91eaa332a08f952cfdc38733727caef" diff --git a/pyproject.toml b/pyproject.toml index 56844398d85..3d1a3e6f215 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" types-requests = "^2.30.0" typing-extensions = "^4.4.0" -mkdocs-material = "^9.1.12" +mkdocs-material = "^9.1.13" filelock = "^3.12.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.70" From 2e66770396df2ec9300b101c20d706d30ea2d5e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 23:47:42 +0100 Subject: [PATCH 44/76] chore(deps-dev): bump sentry-sdk from 1.23.0 to 1.23.1 (#2283) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c0b6a41b0df..72adc490616 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2657,14 +2657,14 @@ pbr = "*" [[package]] name = "sentry-sdk" -version = "1.23.0" +version = "1.23.1" description = "Python client for Sentry (https://sentry.io)" category = "dev" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.23.0.tar.gz", hash = "sha256:58f4ff9e76c21bc7172eeec9f1bccb3ff2247c74c71d5590438ce36c803f46ea"}, - {file = "sentry_sdk-1.23.0-py2.py3-none-any.whl", hash = "sha256:01b56a276642d31cf9b4aaf0b55938677265d7006be4785a10ef6330d0f5bba9"}, + {file = "sentry-sdk-1.23.1.tar.gz", hash = "sha256:0300fbe7a07b3865b3885929fb863a68ff01f59e3bcfb4e7953d0bf7fd19c67f"}, + {file = "sentry_sdk-1.23.1-py2.py3-none-any.whl", hash = "sha256:a884e2478e0b055776ea2b9234d5de9339b4bae0b3a5e74ae43d131db8ded27e"}, ] [package.dependencies] From 04e25c209635fa901e2be93ae57d5c89486d5514 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 23:48:05 +0100 Subject: [PATCH 45/76] chore(deps-dev): bump mypy-boto3-secretsmanager from 1.26.116 to 1.26.135 (#2282) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 72adc490616..7d2c774d895 100644 --- a/poetry.lock +++ b/poetry.lock @@ 
-1795,14 +1795,14 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-secretsmanager" -version = "1.26.116" -description = "Type annotations for boto3.SecretsManager 1.26.116 service generated with mypy-boto3-builder 7.14.5" +version = "1.26.135" +description = "Type annotations for boto3.SecretsManager 1.26.135 service generated with mypy-boto3-builder 7.14.5" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-secretsmanager-1.26.116.tar.gz", hash = "sha256:d95bbef7fdd39876fe42799ed15abd83eee80b612734bd83513b8ce720e7ceec"}, - {file = "mypy_boto3_secretsmanager-1.26.116-py3-none-any.whl", hash = "sha256:3d2f4c42945447e6abdd18289c159a8d5b0e45ccc32067915863e93e38cd9c49"}, + {file = "mypy-boto3-secretsmanager-1.26.135.tar.gz", hash = "sha256:cf523d3e4f6729e244e24d97c692855883e69fa270d11f5021a293fb2aa483e8"}, + {file = "mypy_boto3_secretsmanager-1.26.135-py3-none-any.whl", hash = "sha256:15cf8d8a16eb0a49984ef9f19821a1d2b97bf1e6b56c703f27973a27b32aef4c"}, ] [package.dependencies] @@ -3078,4 +3078,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "dfc9c92c89b3408bdb822fe64cb28eb5a91eaa332a08f952cfdc38733727caef" +content-hash = "a67272cfe964ac20cbf1c84a87f40fb883a474327322218e1390662d8c528639" diff --git a/pyproject.toml b/pyproject.toml index 3d1a3e6f215..a2c6f659f57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ mypy-boto3-cloudwatch = "^1.26.127" mypy-boto3-dynamodb = "^1.26.115" mypy-boto3-lambda = "^1.26.122" mypy-boto3-logs = "^1.26.53" -mypy-boto3-secretsmanager = "^1.26.116" +mypy-boto3-secretsmanager = "^1.26.135" mypy-boto3-ssm = "^1.26.97" mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" From 27d197ca844b995644fcbb6a1d6db31555f2e8ec Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Thu, 18 May 2023 17:27:45 +0200 Subject: [PATCH 46/76] docs(batch): add encryption at rest for SQS (#2290) --- examples/batch_processing/sam/kinesis_batch_processing.yaml | 5 ++++- examples/batch_processing/sam/sqs_batch_processing.yaml | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/examples/batch_processing/sam/kinesis_batch_processing.yaml b/examples/batch_processing/sam/kinesis_batch_processing.yaml index 28b2c58402b..314d4f8c98f 100644 --- a/examples/batch_processing/sam/kinesis_batch_processing.yaml +++ b/examples/batch_processing/sam/kinesis_batch_processing.yaml @@ -1,4 +1,4 @@ -AWSTemplateFormatVersion: '2010-09-09' +AWSTemplateFormatVersion: "2010-09-09" Transform: AWS::Serverless-2016-10-31 Description: partial batch response sample @@ -51,3 +51,6 @@ Resources: Type: AWS::Kinesis::Stream Properties: ShardCount: 1 + StreamEncryption: + EncryptionType: KMS + KeyId: alias/aws/kinesis diff --git a/examples/batch_processing/sam/sqs_batch_processing.yaml b/examples/batch_processing/sam/sqs_batch_processing.yaml index 00bbd00e569..77871c3478b 100644 --- a/examples/batch_processing/sam/sqs_batch_processing.yaml +++ b/examples/batch_processing/sam/sqs_batch_processing.yaml @@ -1,4 +1,4 @@ -AWSTemplateFormatVersion: '2010-09-09' +AWSTemplateFormatVersion: "2010-09-09" Transform: AWS::Serverless-2016-10-31 Description: partial batch response sample @@ -37,6 +37,7 @@ Resources: Type: AWS::SQS::Queue Properties: VisibilityTimeout: 30 # Fn timeout * 6 + SqsManagedSseEnabled: true RedrivePolicy: maxReceiveCount: 2 deadLetterTargetArn: !GetAtt SampleDLQ.Arn From 
042e83a4d1eaa583035143f93cb2752154de3926 Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Thu, 18 May 2023 17:57:15 +0200 Subject: [PATCH 47/76] feat(event_source): allow multiple CORS origins (#2279) Co-authored-by: Leandro Damascena --- .../event_handler/api_gateway.py | 31 ++- .../utilities/data_classes/common.py | 2 +- docs/core/event_handler/api_gateway.md | 23 +- .../event_handler_rest/src/setting_cors.py | 3 +- .../src/setting_cors_extra_origins.py | 45 ++++ .../setting_cors_extra_origins_output.json | 10 + .../e2e/event_handler/handlers/alb_handler.py | 12 +- .../handlers/api_gateway_http_handler.py | 4 +- .../handlers/api_gateway_rest_handler.py | 4 +- .../handlers/lambda_function_url_handler.py | 4 +- tests/e2e/event_handler/test_cors.py | 252 ++++++++++++++++++ tests/events/apiGatewayProxyEvent.json | 3 +- .../event_handler/test_api_gateway.py | 48 +++- 13 files changed, 414 insertions(+), 27 deletions(-) create mode 100644 examples/event_handler_rest/src/setting_cors_extra_origins.py create mode 100644 examples/event_handler_rest/src/setting_cors_extra_origins_output.json create mode 100644 tests/e2e/event_handler/test_cors.py diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 78993f92c5e..05fbc1c06c1 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -84,6 +84,7 @@ def with_cors(): cors_config = CORSConfig( allow_origin="https://wwww.example.com/", + extra_origins=["https://dev.example.com/"], expose_headers=["x-exposed-response-header"], allow_headers=["x-custom-request-header"], max_age=100, @@ -106,6 +107,7 @@ def without_cors(): def __init__( self, allow_origin: str = "*", + extra_origins: Optional[List[str]] = None, allow_headers: Optional[List[str]] = None, expose_headers: Optional[List[str]] = None, max_age: Optional[int] = None, @@ -117,6 +119,8 @@ def __init__( allow_origin: str The value of the `Access-Control-Allow-Origin` to send in the response. Defaults to "*", but should only be used during development. + extra_origins: Optional[List[str]] + The list of additional allowed origins. allow_headers: Optional[List[str]] The list of additional allowed headers. 
This list is added to list of built-in allowed headers: `Authorization`, `Content-Type`, `X-Amz-Date`, @@ -128,16 +132,29 @@ def __init__( allow_credentials: bool A boolean value that sets the value of `Access-Control-Allow-Credentials` """ - self.allow_origin = allow_origin + self._allowed_origins = [allow_origin] + if extra_origins: + self._allowed_origins.extend(extra_origins) self.allow_headers = set(self._REQUIRED_HEADERS + (allow_headers or [])) self.expose_headers = expose_headers or [] self.max_age = max_age self.allow_credentials = allow_credentials - def to_dict(self) -> Dict[str, str]: + def to_dict(self, origin: Optional[str]) -> Dict[str, str]: """Builds the configured Access-Control http headers""" + + # If there's no Origin, don't add any CORS headers + if not origin: + return {} + + # If the origin doesn't match any of the allowed origins, and we don't allow all origins ("*"), + # don't add any CORS headers + if origin not in self._allowed_origins and "*" not in self._allowed_origins: + return {} + + # The origin matched an allowed origin, so return the CORS headers headers: Dict[str, str] = { - "Access-Control-Allow-Origin": self.allow_origin, + "Access-Control-Allow-Origin": origin, "Access-Control-Allow-Headers": ",".join(sorted(self.allow_headers)), } @@ -207,9 +224,9 @@ def __init__(self, response: Response, route: Optional[Route] = None): self.response = response self.route = route - def _add_cors(self, cors: CORSConfig): + def _add_cors(self, event: BaseProxyEvent, cors: CORSConfig): """Update headers to include the configured Access-Control headers""" - self.response.headers.update(cors.to_dict()) + self.response.headers.update(cors.to_dict(event.get_header_value("Origin"))) def _add_cache_control(self, cache_control: str): """Set the specified cache control headers for 200 http responses. For non-200 `no-cache` is used.""" @@ -230,7 +247,7 @@ def _route(self, event: BaseProxyEvent, cors: Optional[CORSConfig]): if self.route is None: return if self.route.cors: - self._add_cors(cors or CORSConfig()) + self._add_cors(event, cors or CORSConfig()) if self.route.cache_control: self._add_cache_control(self.route.cache_control) if self.route.compress and "gzip" in (event.get_header_value("accept-encoding", "") or ""): @@ -644,7 +661,7 @@ def _not_found(self, method: str) -> ResponseBuilder: headers: Dict[str, Union[str, List[str]]] = {} if self._cors: logger.debug("CORS is enabled, updating headers.") - headers.update(self._cors.to_dict()) + headers.update(self._cors.to_dict(self.current_event.get_header_value("Origin"))) if method == "OPTIONS": logger.debug("Pre-flight request detected. 
Returning CORS with null response") diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index d1ce8f90a07..ce02a4c11b0 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -113,7 +113,7 @@ def get_header_value( class BaseProxyEvent(DictWrapper): @property def headers(self) -> Dict[str, str]: - return self["headers"] + return self.get("headers") or {} @property def query_string_parameters(self) -> Optional[Dict[str, str]]: diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 9348575535a..3dc6401ea8d 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -280,7 +280,8 @@ To address this API Gateway behavior, we use `strip_prefixes` parameter to accou You can configure CORS at the `APIGatewayRestResolver` constructor via `cors` parameter using the `CORSConfig` class. -This will ensure that CORS headers are always returned as part of the response when your functions match the path invoked. +This will ensure that CORS headers are returned as part of the response when your functions match the path invoked and the `Origin` +matches one of the allowed values. ???+ tip Optionally disable CORS on a per path basis with `cors=False` parameter. @@ -297,6 +298,18 @@ This will ensure that CORS headers are always returned as part of the response w --8<-- "examples/event_handler_rest/src/setting_cors_output.json" ``` +=== "setting_cors_extra_origins.py" + + ```python hl_lines="5 11-12 34" + --8<-- "examples/event_handler_rest/src/setting_cors_extra_origins.py" + ``` + +=== "setting_cors_extra_origins_output.json" + + ```json + --8<-- "examples/event_handler_rest/src/setting_cors_extra_origins_output.json" + ``` + #### Pre-flight Pre-flight (OPTIONS) calls are typically handled at the API Gateway or Lambda Function URL level as per [our sample infrastructure](#required-resources), no Lambda integration is necessary. However, ALB expects you to handle pre-flight requests. @@ -310,9 +323,13 @@ For convenience, these are the default values when using `CORSConfig` to enable ???+ warning Always configure `allow_origin` when using in production. +???+ tip "Multiple origins?" + If you need to allow multiple origins, pass the additional origins using the `extra_origins` key. + | Key | Value | Note | -| -------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +|----------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | **[allow_origin](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin){target="_blank"}**: `str` | `*` | Only use the default value for development. 
**Never use `*` for production** unless your use case requires it | +| **[extra_origins](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin){target="_blank"}**: `List[str]` | `[]` | Additional origins to be allowed, in addition to the one specified in `allow_origin` | | **[allow_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers){target="_blank"}**: `List[str]` | `[Authorization, Content-Type, X-Amz-Date, X-Api-Key, X-Amz-Security-Token]` | Additional headers will be appended to the default list for your convenience | | **[expose_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Headers){target="_blank"}**: `List[str]` | `[]` | Any additional header beyond the [safe listed by CORS specification](https://developer.mozilla.org/en-US/docs/Glossary/CORS-safelisted_response_header){target="_blank"}. | | **[max_age](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age){target="_blank"}**: `int` | `` | Only for pre-flight requests if you choose to have your function to handle it instead of API Gateway | @@ -331,7 +348,7 @@ You can use the `Response` class to have full control over the response. For exa === "fine_grained_responses.py" - ```python hl_lines="9 28-32" + ```python hl_lines="9 29-35" --8<-- "examples/event_handler_rest/src/fine_grained_responses.py" ``` diff --git a/examples/event_handler_rest/src/setting_cors.py b/examples/event_handler_rest/src/setting_cors.py index 101e013e552..14470cf9d1e 100644 --- a/examples/event_handler_rest/src/setting_cors.py +++ b/examples/event_handler_rest/src/setting_cors.py @@ -8,7 +8,8 @@ tracer = Tracer() logger = Logger() -cors_config = CORSConfig(allow_origin="https://example.com", max_age=300) +# CORS will match only when the Origin is https://www.example.com +cors_config = CORSConfig(allow_origin="https://www.example.com", max_age=300) app = APIGatewayRestResolver(cors=cors_config) diff --git a/examples/event_handler_rest/src/setting_cors_extra_origins.py b/examples/event_handler_rest/src/setting_cors_extra_origins.py new file mode 100644 index 00000000000..3afb2794ec6 --- /dev/null +++ b/examples/event_handler_rest/src/setting_cors_extra_origins.py @@ -0,0 +1,45 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver, CORSConfig +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +# CORS will match when Origin is https://www.example.com OR https://dev.example.com +cors_config = CORSConfig(allow_origin="https://www.example.com", extra_origins=["https://dev.example.com"], max_age=300) +app = APIGatewayRestResolver(cors=cors_config) + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +@app.get("/todos/<todo_id>") +@tracer.capture_method +def get_todo_by_id(todo_id: str): # value comes as str + todos: Response = requests.get(f"https://jsonplaceholder.typicode.com/todos/{todo_id}") + todos.raise_for_status() + + return {"todos": todos.json()} + + +@app.get("/healthcheck", cors=False) # optionally removes CORS for a given route +@tracer.capture_method +def am_i_alive(): + return 
{"am_i_alive": "yes"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/setting_cors_extra_origins_output.json b/examples/event_handler_rest/src/setting_cors_extra_origins_output.json new file mode 100644 index 00000000000..c123435338c --- /dev/null +++ b/examples/event_handler_rest/src/setting_cors_extra_origins_output.json @@ -0,0 +1,10 @@ +{ + "statusCode": 200, + "multiValueHeaders": { + "Content-Type": ["application/json"], + "Access-Control-Allow-Origin": ["https://www.example.com","https://dev.example.com"], + "Access-Control-Allow-Headers": ["Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key"] + }, + "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", + "isBase64Encoded": false +} diff --git a/tests/e2e/event_handler/handlers/alb_handler.py b/tests/e2e/event_handler/handlers/alb_handler.py index 26746284aee..ef1af1792ac 100644 --- a/tests/e2e/event_handler/handlers/alb_handler.py +++ b/tests/e2e/event_handler/handlers/alb_handler.py @@ -1,6 +1,12 @@ -from aws_lambda_powertools.event_handler import ALBResolver, Response, content_types - -app = ALBResolver() +from aws_lambda_powertools.event_handler import ( + ALBResolver, + CORSConfig, + Response, + content_types, +) + +cors_config = CORSConfig(allow_origin="https://www.example.org", extra_origins=["https://dev.example.org"]) +app = ALBResolver(cors=cors_config) # The reason we use post is that whoever is writing tests can easily assert on the # content being sent (body, headers, cookies, content-type) to reduce cognitive load. diff --git a/tests/e2e/event_handler/handlers/api_gateway_http_handler.py b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py index 1012af7b3fb..876d78ef67b 100644 --- a/tests/e2e/event_handler/handlers/api_gateway_http_handler.py +++ b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py @@ -1,10 +1,12 @@ from aws_lambda_powertools.event_handler import ( APIGatewayHttpResolver, + CORSConfig, Response, content_types, ) -app = APIGatewayHttpResolver() +cors_config = CORSConfig(allow_origin="https://www.example.org", extra_origins=["https://dev.example.org"]) +app = APIGatewayHttpResolver(cors=cors_config) # The reason we use post is that whoever is writing tests can easily assert on the # content being sent (body, headers, cookies, content-type) to reduce cognitive load. 
diff --git a/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py index d52e2728cab..d09bf6b82c9 100644 --- a/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py +++ b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py @@ -1,10 +1,12 @@ from aws_lambda_powertools.event_handler import ( APIGatewayRestResolver, + CORSConfig, Response, content_types, ) -app = APIGatewayRestResolver() +cors_config = CORSConfig(allow_origin="https://www.example.org", extra_origins=["https://dev.example.org"]) +app = APIGatewayRestResolver(cors=cors_config) # The reason we use post is that whoever is writing tests can easily assert on the # content being sent (body, headers, cookies, content-type) to reduce cognitive load. diff --git a/tests/e2e/event_handler/handlers/lambda_function_url_handler.py b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py index f90037afc75..e47035a971d 100644 --- a/tests/e2e/event_handler/handlers/lambda_function_url_handler.py +++ b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py @@ -1,10 +1,12 @@ from aws_lambda_powertools.event_handler import ( + CORSConfig, LambdaFunctionUrlResolver, Response, content_types, ) -app = LambdaFunctionUrlResolver() +cors_config = CORSConfig(allow_origin="https://www.example.org", extra_origins=["https://dev.example.org"]) +app = LambdaFunctionUrlResolver(cors=cors_config) # The reason we use post is that whoever is writing tests can easily assert on the # content being sent (body, headers, cookies, content-type) to reduce cognitive load. diff --git a/tests/e2e/event_handler/test_cors.py b/tests/e2e/event_handler/test_cors.py new file mode 100644 index 00000000000..5d2f140715f --- /dev/null +++ b/tests/e2e/event_handler/test_cors.py @@ -0,0 +1,252 @@ +import pytest +from requests import Request + +from tests.e2e.utils import data_fetcher +from tests.e2e.utils.auth import build_iam_auth + + +@pytest.fixture +def alb_basic_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBBasicListenerPort", "") + return f"http://{dns_name}:{port}" + + +@pytest.fixture +def apigw_http_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayHTTPUrl", "") + + +@pytest.fixture +def apigw_rest_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayRestUrl", "") + + +@pytest.fixture +def lambda_function_url_endpoint(infrastructure: dict) -> str: + return infrastructure.get("LambdaFunctionUrl", "") + + +@pytest.mark.xdist_group(name="event_handler") +def test_alb_cors_with_correct_origin(alb_basic_listener_endpoint): + # GIVEN + url = f"{alb_basic_listener_endpoint}/todos" + headers = {"Origin": "https://www.example.org"} + + # WHEN + response = data_fetcher.get_http_response(Request(method="POST", url=url, headers=headers, json={})) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://www.example.org" + + +@pytest.mark.xdist_group(name="event_handler") +def test_alb_cors_with_correct_alternative_origin(alb_basic_listener_endpoint): + # GIVEN + url = f"{alb_basic_listener_endpoint}/todos" + headers = {"Origin": "https://dev.example.org"} + + # WHEN + response = data_fetcher.get_http_response(Request(method="POST", url=url, headers=headers, json={})) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://dev.example.org" + + 
+@pytest.mark.xdist_group(name="event_handler") +def test_alb_cors_with_unknown_origin(alb_basic_listener_endpoint): + # GIVEN + url = f"{alb_basic_listener_endpoint}/todos" + headers = {"Origin": "https://www.google.com"} + + # WHEN + response = data_fetcher.get_http_response(Request(method="POST", url=url, headers=headers, json={})) + + # THEN response does NOT have CORS headers + assert "Access-Control-Allow-Origin" not in response.headers + + +@pytest.mark.xdist_group(name="event_handler") +def test_api_gateway_http_cors_with_correct_origin(apigw_http_endpoint): + # GIVEN + url = f"{apigw_http_endpoint}todos" + headers = {"Origin": "https://www.example.org"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + auth=build_iam_auth(url=url, aws_service="execute-api"), + ) + ) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://www.example.org" + + +@pytest.mark.xdist_group(name="event_handler") +def test_api_gateway_http_cors_with_correct_alternative_origin(apigw_http_endpoint): + # GIVEN + url = f"{apigw_http_endpoint}todos" + headers = {"Origin": "https://dev.example.org"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + auth=build_iam_auth(url=url, aws_service="execute-api"), + ) + ) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://dev.example.org" + + +@pytest.mark.xdist_group(name="event_handler") +def test_api_gateway_http_cors_with_unknown_origin(apigw_http_endpoint): + # GIVEN + url = f"{apigw_http_endpoint}todos" + headers = {"Origin": "https://www.google.com"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + auth=build_iam_auth(url=url, aws_service="execute-api"), + ) + ) + + # THEN response does NOT have CORS headers + assert "Access-Control-Allow-Origin" not in response.headers + + +@pytest.mark.xdist_group(name="event_handler") +def test_api_gateway_rest_cors_with_correct_origin(apigw_rest_endpoint): + # GIVEN + url = f"{apigw_rest_endpoint}todos" + headers = {"Origin": "https://www.example.org"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + ) + ) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://www.example.org" + + +@pytest.mark.xdist_group(name="event_handler") +def test_api_gateway_rest_cors_with_correct_alternative_origin(apigw_rest_endpoint): + # GIVEN + url = f"{apigw_rest_endpoint}todos" + headers = {"Origin": "https://dev.example.org"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + ) + ) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://dev.example.org" + + +@pytest.mark.xdist_group(name="event_handler") +def test_api_gateway_rest_cors_with_unknown_origin(apigw_rest_endpoint): + # GIVEN + url = f"{apigw_rest_endpoint}todos" + headers = {"Origin": "https://www.google.com"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + ) + ) + + # THEN response does NOT have CORS headers + assert "Access-Control-Allow-Origin" not in response.headers + + +@pytest.mark.xdist_group(name="event_handler") 
+def test_lambda_function_url_cors_with_correct_origin(lambda_function_url_endpoint): + # GIVEN + url = f"{lambda_function_url_endpoint}todos" + headers = {"Origin": "https://www.example.org"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + auth=build_iam_auth(url=url, aws_service="lambda"), + ) + ) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://www.example.org" + + +@pytest.mark.xdist_group(name="event_handler") +def test_lambda_function_url_cors_with_correct_alternative_origin(lambda_function_url_endpoint): + # GIVEN + url = f"{lambda_function_url_endpoint}todos" + headers = {"Origin": "https://dev.example.org"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + auth=build_iam_auth(url=url, aws_service="lambda"), + ) + ) + + # THEN response has CORS headers + assert response.headers["Access-Control-Allow-Origin"] == "https://dev.example.org" + + +@pytest.mark.xdist_group(name="event_handler") +def test_lambda_function_url_cors_with_unknown_origin(lambda_function_url_endpoint): + # GIVEN + url = f"{lambda_function_url_endpoint}todos" + headers = {"Origin": "https://www.google.com"} + + # WHEN + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + headers=headers, + json={}, + auth=build_iam_auth(url=url, aws_service="lambda"), + ) + ) + + # THEN response does NOT have CORS headers + assert "Access-Control-Allow-Origin" not in response.headers diff --git a/tests/events/apiGatewayProxyEvent.json b/tests/events/apiGatewayProxyEvent.json index 11833d21f2c..da814c91100 100644 --- a/tests/events/apiGatewayProxyEvent.json +++ b/tests/events/apiGatewayProxyEvent.json @@ -5,7 +5,8 @@ "httpMethod": "GET", "headers": { "Header1": "value1", - "Header2": "value2" + "Header2": "value2", + "Origin": "https://aws.amazon.com" }, "multiValueHeaders": { "Header1": [ diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index ad9f834dbb2..6faad88d7f1 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -343,7 +343,7 @@ def handler(event, context): assert "multiValueHeaders" in result headers = result["multiValueHeaders"] assert headers["Content-Type"] == [content_types.TEXT_HTML] - assert headers["Access-Control-Allow-Origin"] == ["*"] + assert headers["Access-Control-Allow-Origin"] == ["https://aws.amazon.com"] assert "Access-Control-Allow-Credentials" not in headers assert headers["Access-Control-Allow-Headers"] == [",".join(sorted(CORSConfig._REQUIRED_HEADERS))] @@ -533,6 +533,34 @@ def rest_func() -> Response: assert result["body"] == "Not found" +def test_cors_multi_origin(): + # GIVEN a custom cors configuration with multiple origins + cors_config = CORSConfig(allow_origin="https://origin1", extra_origins=["https://origin2", "https://origin3"]) + app = ApiGatewayResolver(cors=cors_config) + + @app.get("/cors") + def get_with_cors(): + return {} + + # WHEN calling the event handler with the correct Origin + event = {"path": "/cors", "httpMethod": "GET", "headers": {"Origin": "https://origin3"}} + result = app(event, None) + + # THEN routes by default return the custom cors headers + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.APPLICATION_JSON] + assert 
headers["Access-Control-Allow-Origin"] == ["https://origin3"] + + # WHEN calling the event handler with the wrong origin + event = {"path": "/cors", "httpMethod": "GET", "headers": {"Origin": "https://wrong.origin"}} + result = app(event, None) + + # THEN routes by default return the custom cors headers + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.APPLICATION_JSON] + assert "Access-Control-Allow-Origin" not in headers + + def test_custom_cors_config(): # GIVEN a custom cors configuration allow_header = ["foo2"] @@ -544,7 +572,7 @@ def test_custom_cors_config(): allow_credentials=True, ) app = ApiGatewayResolver(cors=cors_config) - event = {"path": "/cors", "httpMethod": "GET"} + event = {"path": "/cors", "httpMethod": "GET", "headers": {"Origin": "https://foo1"}} @app.get("/cors") def get_with_cors(): @@ -561,7 +589,7 @@ def another_one(): assert "multiValueHeaders" in result headers = result["multiValueHeaders"] assert headers["Content-Type"] == [content_types.APPLICATION_JSON] - assert headers["Access-Control-Allow-Origin"] == [cors_config.allow_origin] + assert headers["Access-Control-Allow-Origin"] == ["https://foo1"] expected_allows_headers = [",".join(sorted(set(allow_header + cors_config._REQUIRED_HEADERS)))] assert headers["Access-Control-Allow-Headers"] == expected_allows_headers assert headers["Access-Control-Expose-Headers"] == [",".join(cors_config.expose_headers)] @@ -604,9 +632,9 @@ def test_no_matches_with_cors(): result = app({"path": "/another-one", "httpMethod": "GET"}, None) # THEN return a 404 - # AND cors headers are returned + # AND cors headers are NOT returned (because no Origin header was passed in) assert result["statusCode"] == 404 - assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] assert "Not found" in result["body"] @@ -628,7 +656,7 @@ def post_no_cors(): ... # WHEN calling the handler - result = app({"path": "/foo", "httpMethod": "OPTIONS"}, None) + result = app({"path": "/foo", "httpMethod": "OPTIONS", "headers": {"Origin": "http://example.org"}}, None) # THEN return no content # AND include Access-Control-Allow-Methods of the cors methods used @@ -659,8 +687,11 @@ def custom_preflight(): def custom_method(): ... 
+ # AND the request includes an origin + headers = {"Origin": "https://example.org"} + # WHEN calling the handler - result = app({"path": "/some-call", "httpMethod": "OPTIONS"}, None) + result = app({"path": "/some-call", "httpMethod": "OPTIONS", "headers": headers}, None) # THEN return the custom preflight response assert result["statusCode"] == 200 @@ -747,7 +778,8 @@ def service_error(): # AND status code equals 502 assert result["statusCode"] == 502 assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] - assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] + # Because no Origin was passed in, there is no Access-Control-Allow-Origin header on the output + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] expected = {"statusCode": 502, "message": "Something went wrong!"} assert result["body"] == json_dump(expected) From b3679c3b40699526cc85bed6881a602c1ffe7775 Mon Sep 17 00:00:00 2001 From: Simon Thulbourn Date: Fri, 19 May 2023 12:12:00 +0200 Subject: [PATCH 48/76] feat(docs): Move docs to S3 (#2277) --- .github/workflows/on_push_docs.yml | 4 ++++ .github/workflows/publish_v2_layer.yml | 4 ++-- .github/workflows/rebuild_latest_docs.yml | 4 ++++ .github/workflows/reusable_publish_docs.yml | 26 +++++++++++++++++++++ 4 files changed, 36 insertions(+), 2 deletions(-) diff --git a/.github/workflows/on_push_docs.yml b/.github/workflows/on_push_docs.yml index 340f669b0f7..e257f49e808 100644 --- a/.github/workflows/on_push_docs.yml +++ b/.github/workflows/on_push_docs.yml @@ -10,11 +10,15 @@ on: - "examples/**" - "CHANGELOG.md" +permissions: + id-token: write + jobs: release-docs: permissions: contents: write pages: write + id-token: write uses: ./.github/workflows/reusable_publish_docs.yml with: version: develop diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml index dcdc43a6e2c..5447d0a51e5 100644 --- a/.github/workflows/publish_v2_layer.yml +++ b/.github/workflows/publish_v2_layer.yml @@ -28,7 +28,7 @@ jobs: permissions: # lower privilege propagated from parent workflow (release.yml) contents: read - id-token: none + id-token: write pages: none pull-requests: none runs-on: aws-lambda-powertools_ubuntu-latest_8-core @@ -223,7 +223,7 @@ jobs: contents: write pages: write pull-requests: none - id-token: none + id-token: write uses: ./.github/workflows/reusable_publish_docs.yml with: version: ${{ inputs.latest_published_version }} diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index 3e481860992..deac728ac25 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -14,11 +14,15 @@ on: default: "2.0.0" required: true +permissions: + id-token: write + jobs: release-docs: permissions: contents: write pages: write + id-token: write uses: ./.github/workflows/reusable_publish_docs.yml with: version: ${{ inputs.latest_published_version }} diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml index eb0d8f8598a..f624d327ee5 100644 --- a/.github/workflows/reusable_publish_docs.yml +++ b/.github/workflows/reusable_publish_docs.yml @@ -26,6 +26,7 @@ on: default: develop permissions: + id-token: write contents: write pages: write @@ -36,6 +37,7 @@ jobs: concurrency: group: on-docs-rebuild runs-on: ubuntu-latest + environment: Docs steps: - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: @@ -88,3 +90,27 @@ jobs: publish_dir: ./api keep_files: 
true destination_dir: latest/api + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + with: + aws-region: us-east-1 + role-to-assume: ${{ secrets.AWS_DOCS_ROLE_ARN }} + - name: Copy API Docs + run: | + cp -r api site/ + - name: Deploy Docs (Version) + env: + VERSION: ${{ inputs.version }} + ALIAS: ${{ inputs.alias }} + run: | + aws s3 sync \ + site/ \ + s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-python/${{ env.VERSION }}/ + - name: Deploy Docs (Alias) + env: + VERSION: ${{ inputs.version }} + ALIAS: ${{ inputs.alias }} + run: | + aws s3 sync \ + site/ \ + s3://${{ secrets.AWS_DOCS_BUCKET }}/lambda-python/${{ env.ALIAS }}/ From 9c26d24025681d9767f3decae808d618348ac8ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 May 2023 22:25:21 +0100 Subject: [PATCH 49/76] chore(deps-dev): bump pytest-xdist from 3.3.0 to 3.3.1 (#2297) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7d2c774d895..8ed0b8e20ca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2291,14 +2291,14 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" -version = "3.3.0" +version = "3.3.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.3.0.tar.gz", hash = "sha256:d42c9efb388da35480878ef4b2993704c6cea800c8bafbe85a8cdc461baf0748"}, - {file = "pytest_xdist-3.3.0-py3-none-any.whl", hash = "sha256:76f7683d4f993eaff91c9cb0882de0465c4af9c6dd3debc903833484041edc1a"}, + {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, + {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, ] [package.dependencies] @@ -3078,4 +3078,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "a67272cfe964ac20cbf1c84a87f40fb883a474327322218e1390662d8c528639" +content-hash = "4360adbdea8ebbbb21203a0889f65b460883f96e1590d3cff0ef310bbd1e7afe" diff --git a/pyproject.toml b/pyproject.toml index a2c6f659f57..a36f782403c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ flake8-bugbear = "^23.3.12" mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" retry = "^0.9.2" -pytest-xdist = "^3.3.0" +pytest-xdist = "^3.3.1" aws-cdk-lib = "^2.75.0" "aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" From 5d70149bc12e1526cae493fb603856ce035aad17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 May 2023 22:29:36 +0100 Subject: [PATCH 50/76] chore(deps-dev): bump httpx from 0.24.0 to 0.24.1 (#2298) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8ed0b8e20ca..3909679a0e8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -951,14 +951,14 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "httpx" -version = "0.24.0" +version = "0.24.1" description = "The next generation HTTP client." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "httpx-0.24.0-py3-none-any.whl", hash = "sha256:447556b50c1921c351ea54b4fe79d91b724ed2b027462ab9a329465d147d5a4e"}, - {file = "httpx-0.24.0.tar.gz", hash = "sha256:507d676fc3e26110d41df7d35ebd8b3b8585052450f4097401c9be59d928c63e"}, + {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, + {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, ] [package.dependencies] From 42fb8bc09a5df87bb918ebaed1abeb59eecf7d40 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 22:08:51 +0100 Subject: [PATCH 51/76] chore(deps-dev): bump mkdocs-material from 9.1.13 to 9.1.14 (#2304) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3909679a0e8..7bc7c5503cf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1574,14 +1574,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.13" +version = "9.1.14" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.13-py3-none-any.whl", hash = "sha256:5705cf8cb6c47c747606bd914bb6c01993ff141295cd259475559a1f09f07d5d"}, - {file = "mkdocs_material-9.1.13.tar.gz", hash = "sha256:9102e7604d73e507021847601b0a8b4fe9035422788390183f464fa3b30dd508"}, + {file = "mkdocs_material-9.1.14-py3-none-any.whl", hash = "sha256:b56a9f955ed32d38333715cbbf68ce38f683bf38610c65094fa4ef2db9f08bcd"}, + {file = "mkdocs_material-9.1.14.tar.gz", hash = "sha256:1ae74cc5464ef2f64574d4884512efed7f4db386fb9bc6af20fd427d7a702f49"}, ] [package.dependencies] @@ -3078,4 +3078,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "4360adbdea8ebbbb21203a0889f65b460883f96e1590d3cff0ef310bbd1e7afe" +content-hash = "313ebc539d7ce406d5c06a3a73fb571a882c7115976c17dc7e18e73a238a26f4" diff --git a/pyproject.toml b/pyproject.toml index a36f782403c..1ac5b04408d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" types-requests = "^2.30.0" typing-extensions = "^4.4.0" -mkdocs-material = "^9.1.13" +mkdocs-material = "^9.1.14" filelock = "^3.12.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.70" From e4914a1e9b3a17ba9ae9909e790b829644149491 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 22:17:12 +0100 Subject: [PATCH 52/76] chore(deps): bump fastjsonschema from 2.16.3 to 2.17.1 (#2307) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7bc7c5503cf..3cfc254af7e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -657,14 +657,14 @@ testing = ["pre-commit"] [[package]] name = "fastjsonschema" -version = "2.16.3" +version = "2.17.1" description = "Fastest Python implementation of JSON schema" category = "main" optional = true python-versions = "*" files = [ - {file = "fastjsonschema-2.16.3-py3-none-any.whl", hash = "sha256:04fbecc94300436f628517b05741b7ea009506ce8f946d40996567c669318490"}, - {file = 
"fastjsonschema-2.16.3.tar.gz", hash = "sha256:4a30d6315a68c253cfa8f963b9697246315aa3db89f98b97235e345dedfb0b8e"}, + {file = "fastjsonschema-2.17.1-py3-none-any.whl", hash = "sha256:4b90b252628ca695280924d863fe37234eebadc29c5360d322571233dc9746e0"}, + {file = "fastjsonschema-2.17.1.tar.gz", hash = "sha256:f4eeb8a77cef54861dbf7424ac8ce71306f12cbb086c45131bcba2c6a4f726e3"}, ] [package.extras] From ef70c60a660fd25d318ee3ed81c738ab71e6e29c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 22:17:26 +0100 Subject: [PATCH 53/76] chore(deps-dev): bump aws-cdk from 2.79.1 to 2.80.0 (#2305) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 14 +++++++------- package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/package-lock.json b/package-lock.json index 2660f7c962b..f4e9f751102 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,13 +8,13 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.79.1" + "aws-cdk": "^2.80.0" } }, "node_modules/aws-cdk": { - "version": "2.79.1", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.1.tgz", - "integrity": "sha512-N6intzdRFqrHC+O3Apty34RiTev2+bzvRtUbehVd5IyAmTvLsgE/jlhPUIJV2POSAK+bKOV+ZWEp9qMOj1hq8A==", + "version": "2.80.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.80.0.tgz", + "integrity": "sha512-SKMZ/sGlNmFV37Lk40HHe4QJ2hJZmD0PrkScBmkr33xzEqjyKhN3jIHC4PYqTUeUK/qYemq3Y5OpXKQuWTCoKA==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -43,9 +43,9 @@ }, "dependencies": { "aws-cdk": { - "version": "2.79.1", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.79.1.tgz", - "integrity": "sha512-N6intzdRFqrHC+O3Apty34RiTev2+bzvRtUbehVd5IyAmTvLsgE/jlhPUIJV2POSAK+bKOV+ZWEp9qMOj1hq8A==", + "version": "2.80.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.80.0.tgz", + "integrity": "sha512-SKMZ/sGlNmFV37Lk40HHe4QJ2hJZmD0PrkScBmkr33xzEqjyKhN3jIHC4PYqTUeUK/qYemq3Y5OpXKQuWTCoKA==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index d461cfb902f..8b8063b4000 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,6 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.79.1" + "aws-cdk": "^2.80.0" } } From 653db455d1c0679b5e3f90c09f3b75889443e3e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 22:17:42 +0100 Subject: [PATCH 54/76] chore(deps): bump requests from 2.28.2 to 2.31.0 (#2308) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3cfc254af7e..facfc231082 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2565,21 +2565,21 @@ files = [ [[package]] name = "requests" -version = "2.28.2" +version = "2.31.0" description = "Python HTTP for Humans." 
category = "dev" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] From 1995d0fa99706d5c7ca256a1c76b9c972ee7ad76 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 23 May 2023 10:19:54 +0200 Subject: [PATCH 55/76] chore(ci): source code tampering protection for release (#2301) * chore: create download-artifact, upload-artifact * chore: seals source code, separate quality check & build Uses new mechanism to seal source code, run quality checks separately, and always reuses sealed source code in every step to prevent source code tampering. It documents every job purpose, and creates a new faster action to upload and download artifacts. * chore: ruben's feedback Co-authored-by: Ruben Fonseca Signed-off-by: Heitor Lessa * chore: document remaining sections; update release process doc * chore: include python bytecode in tarball for accurate hash verification * chore: add hash verification * chore: cleanup before review * chore: fix build integrity hash reference * chore: upgrade download-artifact to v3 due to node deprecation warnings --------- Signed-off-by: Heitor Lessa Co-authored-by: Ruben Fonseca --- .github/actions/download-artifact/action.yml | 58 ++++ .github/actions/upload-artifact/action.yml | 82 +++++ .github/workflows/release.yml | 318 ++++++++++++++----- 3 files changed, 387 insertions(+), 71 deletions(-) create mode 100644 .github/actions/download-artifact/action.yml create mode 100644 .github/actions/upload-artifact/action.yml diff --git a/.github/actions/download-artifact/action.yml b/.github/actions/download-artifact/action.yml new file mode 100644 index 00000000000..ef938ddb684 --- /dev/null +++ b/.github/actions/download-artifact/action.yml @@ -0,0 +1,58 @@ +name: Download artifact +description: Wrapper around GitHub's official action, with additional extraction before download + +# PROCESS +# +# 1. Downloads artifact using actions/download-artifact action +# 2. Extracts and overwrites tarball previously uploaded +# 3. Remove archive after extraction + +# NOTES +# +# Upload-artifact and download-artifact takes ~2m40s to upload 8MB +# so this is custom action cuts down the entire operation to 1s +# by uploading/extracting a tarball while relying on the official upload-artifact/download-artifact actions +# + +# USAGE +# +# NOTE: Meant to be used with ./.github/actions/upload-artifact +# +# - name: Restore sealed source code +# uses: ./.github/actions/download-artifact +# with: +# name: ${{ needs.seal.outputs.INTEGRITY_HASH }} +# path: . + +# https://github.com/actions/download-artifact/blob/main/action.yml +inputs: + name: + description: Artifact name + required: true + path: + description: Destination path. By default, it will download to the current working directory. + required: false + default: . 
+ +runs: + using: composite + steps: + - name: Download artifacts + uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + with: + name: ${{ inputs.name }} + path: ${{ inputs.path }} + + - name: Extract artifacts + run: tar -xvf "${ARCHIVE}" + env: + ARCHIVE: ${{ inputs.name }}.tar + shell: bash + working-directory: ${{ inputs.path }} + + - name: Remove archive + run: rm -f "${ARCHIVE}" + env: + ARCHIVE: ${{ inputs.name }}.tar + shell: bash + working-directory: ${{ inputs.path }} diff --git a/.github/actions/upload-artifact/action.yml b/.github/actions/upload-artifact/action.yml new file mode 100644 index 00000000000..ffa18cc0723 --- /dev/null +++ b/.github/actions/upload-artifact/action.yml @@ -0,0 +1,82 @@ +name: Upload artifact +description: Wrapper around GitHub's official action, with additional archiving before upload + +# PROCESS +# +# 1. Creates tarball excluding .git files +# 2. Uploads tarball using actions/upload-artifact action, failing the CI job if no file is found +# 3. Removes archive after uploading it + +# NOTES +# +# Upload-artifact and download-artifact take ~2m40s to upload 8MB, +# so this custom action cuts the entire operation down to 1s +# by uploading/extracting a tarball while relying on the official upload-artifact/download-artifact actions +# + +# USAGE +# +# NOTE: Meant to be used with ./.github/actions/download-artifact +# +# - name: Upload sealed source code +# uses: ./.github/actions/upload-artifact +# with: +# name: ${{ steps.integrity.outputs.INTEGRITY_HASH }} +# path: . + +# https://github.com/actions/upload-artifact/blob/main/action.yml +inputs: + name: + description: Artifact name + required: true + path: + description: > + A file, directory or wildcard pattern that describes what to upload. + + You can pass multiple paths separated by space (e.g., dir1 dir2 file.txt). + + Paths and wildcard patterns must be tar command compatible. + required: true + retention-days: + description: > + Artifact retention in days. By default 1 day, max of 90 days, and 0 honours default repo retention. + + You can change max days in the repository settings. + required: false + default: "1" + if-no-files-found: + description: > + Action to perform if no files are found: warn, error, ignore. By default, it fails fast with 'error'. + + Options: + warn: Output a warning but do not fail the action + error: Fail the action with an error message + ignore: Do not output any warnings or errors, the action does not fail + required: false + default: error + +runs: + using: composite + steps: + - name: Archive artifacts + run: | + tar --exclude-vcs \ + -cvf "${ARCHIVE}" "${PATH_TO_ARCHIVE}" + env: + ARCHIVE: ${{ inputs.name }}.tar + PATH_TO_ARCHIVE: ${{ inputs.path }} + shell: bash + + - name: Upload artifacts + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + with: + if-no-files-found: ${{ inputs.if-no-files-found }} + name: ${{ inputs.name }} + path: ${{ inputs.name }}.tar + retention-days: ${{ inputs.retention-days }} + + - name: Remove archive + run: rm -f "${ARCHIVE}" + env: + ARCHIVE: ${{ inputs.name }}.tar + shell: bash diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 789104dd6db..d5f22affe18 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -4,15 +4,17 @@ name: Release # # === Automated activities === # -# 1. Run tests, linting, security and complexity base line -# 2. Bump package version and build release artifact -# 3. 
Publish package to PyPi prod repository using cached artifact
-# 4. Compile Layer and kick off pipeline for beta, prod, and canary releases
-# 5. Update docs with latest Layer ARNs and Changelog
-# 6. Create PR to update trunk so staged docs also point to the latest Layer ARN, when merged
-# 7. Builds a new user guide and API docs with release version; update /latest pointing to newly released version
-# 8. Create PR to update package version on trunk
-# 9. Close all issues labeled "pending-release" and notify customers about the release
+# 1. [Seal] Bump to release version and export source code with integrity hash
+# 2. [Quality check] Restore sealed source code, run tests, linting, security and complexity baseline
+# 3. [Build] Restore sealed source code, create and export hashed build artifact for PyPi release (wheel, tarball)
+# 4. [Release] Restore built artifact, and publish package to PyPi prod repository
+# 5. [Create Tag] Restore sealed source code, and create a new git tag using released version
+# 6. [PR to bump version] Restore sealed source code, and create a PR to update trunk with latest released project metadata
+# 7. [Publish Layer] Compile Layer and kick off pipeline for beta, prod, and canary releases
+# 8. [Publish Layer] Update docs with latest Layer ARNs and Changelog
+# 9. [Publish Layer] Create PR to update trunk so staged docs also point to the latest Layer ARN, when merged
+# 10. [Publish Layer] Builds a new user guide and API docs with release version; update /latest pointing to newly released version
+# 11. [Post release] Close all issues labeled "pending-release" and notify customers about the release
 #
 # === Manual activities ===
 #
@@ -23,8 +25,8 @@ name: Release
 # See MAINTAINERS.md "Releasing a new version" for release mechanisms

 env:
-  BRANCH: develop
-  ORIGIN: awslabs/aws-lambda-powertools-python
+  RELEASE_COMMIT: ${{ github.sha }}
+  RELEASE_TAG_VERSION: ${{ inputs.version_to_publish }}

 on:
   workflow_dispatch:
@@ -50,72 +52,191 @@ on:
         required: false

 jobs:
-  build:
-    runs-on: aws-lambda-powertools_ubuntu-latest_4-core
+
+  # This job bumps the package version to the release version,
+  # creates an integrity hash from the source code, and
+  # uploads the artifact with the integrity hash as the key name
+  # so subsequent jobs can restore from a trusted point in time to prevent tampering
+  seal:
+    runs-on: ubuntu-latest
     permissions:
       contents: read
     outputs:
-      RELEASE_VERSION: ${{ steps.release_version.outputs.RELEASE_VERSION }}
+      SOURCE_CODE_HASH: ${{ steps.integrity.outputs.SOURCE_CODE_HASH }}
+      RELEASE_VERSION: ${{ steps.release_version.outputs.RELEASE_VERSION }}
+    steps:
+      - name: Export release version
+        id: release_version
+        # transform tag format `v<version>` to `<version>`
+        run: |
+          RELEASE_VERSION="${RELEASE_TAG_VERSION:1}"
+          echo "RELEASE_VERSION=${RELEASE_VERSION}" >> "$GITHUB_OUTPUT"
+
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
+        with:
+          ref: ${{ env.RELEASE_COMMIT }}
+
+      # We use a pinned version of Poetry to be certain it won't modify source code before we create a hash
+      - name: Install poetry
+        run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0
+
+      - name: Bump package version
+        id: versioning
+        run: poetry version "${RELEASE_VERSION}"
+        env:
+          RELEASE_VERSION: ${{ steps.release_version.outputs.RELEASE_VERSION }}
+
+      - name: Create integrity hash
+        id: integrity
+        run: echo "SOURCE_CODE_HASH=${HASH}" >> "$GITHUB_OUTPUT"
+        env:
+          # paths to hash and why they're important to protect
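+          # (illustrative note: hashFiles() returns a single SHA-256 digest over the
+          # matched file set, so changing any one of the files below changes the hash)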
+          #
+          # aws_lambda_powertools/  - source code
+          # pyproject.toml          - project metadata
+          # poetry.lock             - project dependencies
+          # layer/                  - layer infrastructure and pipeline
+          # .github/                - github scripts and actions used in the release
+          # docs/                   - user guide documentation
+          # examples/               - user guide code snippets
+          HASH: ${{ hashFiles('aws_lambda_powertools/**', 'pyproject.toml', 'poetry.lock', 'layer/**', '.github/**', 'docs/**', 'examples/**') }}
+
+      - name: Upload sealed source code
+        uses: ./.github/actions/upload-artifact
+        with:
+          name: source-${{ steps.integrity.outputs.SOURCE_CODE_HASH }}
+          path: .
+
+
+  # This job runs our automated test suite, complexity and security baselines
+  # it ensures previously merged changes have been tested as part of the pull request process
+  quality_check:
+    needs: seal
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
     env:
-      RELEASE_TAG_VERSION: ${{ inputs.version_to_publish }}
+      SOURCE_INTEGRITY_HASH: ${{ needs.seal.outputs.SOURCE_CODE_HASH }}
     steps:
+      # NOTE: we need actions/checkout to configure git first (pre-commit hooks in make dev)
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
         with:
-          fetch-depth: 0
+          ref: ${{ env.RELEASE_COMMIT }}
+
+      - name: Restore sealed source code
+        uses: ./.github/actions/download-artifact
+        with:
+          name: source-${{ env.SOURCE_INTEGRITY_HASH }}
+          path: .
+
+      - name: Debug cache restore
+        run: cat pyproject.toml
+
       - name: Install poetry
-        run: pipx install poetry
+        run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0
       - name: Set up Python
         uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0
         with:
           python-version: "3.10"
           cache: "poetry"
-      - name: Set release notes tag
-        id: release_version
-        # transform tag format `v<version>` to `<version>`
-        run: |
-          RELEASE_VERSION="${RELEASE_TAG_VERSION:1}"
-          echo "RELEASE_VERSION=${RELEASE_VERSION}" >> "$GITHUB_ENV"
-          echo "RELEASE_VERSION=${RELEASE_VERSION}" >> "$GITHUB_OUTPUT"
       - name: Install dependencies
         run: make dev
       - name: Run all tests, linting and baselines
-        if: ${{ !inputs.skip_code_quality }}
         run: make pr
-      - name: Bump package version
-        id: versioning
-        run: poetry version "${RELEASE_VERSION}"
+
+
+  # This job creates a release artifact (tar.gz, wheel)
+  # it checks out code from release commit for custom actions to work
+  # then restores the sealed source code (overwrites any potential tampering)
+  # it's done separately from the release job to enforce least privilege.
+  # We export just the final build artifact for release (release-)
+  build:
+    runs-on: ubuntu-latest
+    needs: [quality_check, seal]
+    permissions:
+      contents: read
+    outputs:
+      BUILD_INTEGRITY_HASH: ${{ steps.integrity.outputs.BUILD_INTEGRITY_HASH }}
+    env:
+      SOURCE_INTEGRITY_HASH: ${{ needs.seal.outputs.SOURCE_CODE_HASH }}
+    steps:
+      # NOTE: we need actions/checkout to configure git first (pre-commit hooks in make dev)
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
+        with:
+          ref: ${{ env.RELEASE_COMMIT }}
+
+      - name: Restore sealed source code
+        uses: ./.github/actions/download-artifact
+        with:
+          name: source-${{ env.SOURCE_INTEGRITY_HASH }}
+          path: .
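+
+      # NOTE (illustrative): the "Source code tampering check" step further below is
+      # effectively this gate, recomputed against the restored tree (pseudo-shell;
+      # hashFiles() is a GitHub expression, not a shell command):
+      #
+      #   CURRENT_HASH="<hashFiles over the sealed paths>"
+      #   test "${SOURCE_INTEGRITY_HASH}" = "${CURRENT_HASH}" || exit 1
+      #
+      # so any byte changed after the seal job produces a mismatch and fails the build.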
+
+      - name: Install poetry
+        run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0
+      - name: Set up Python
+        uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0
+        with:
+          python-version: "3.10"
+          cache: "poetry"
+
       - name: Build python package and wheel
         run: poetry build
-      - name: Cache release artifact
-        id: cache-release-build
-        uses: actions/cache/save@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1
+
+      # NOTE: Ran out of time to create a composite action out of this
+      # because GitHub Action inputs do not support arrays and it became fragile to join multiple strings then split
+      # keeping these hardcoded for now until we have a cleaner way to reuse files/dirs we want to hash
+      - name: Source code tampering check
+        run: test "${SOURCE_INTEGRITY_HASH}" = "${CURRENT_HASH}" || exit 1
+        env:
+          CURRENT_HASH: ${{ hashFiles('aws_lambda_powertools/**', 'pyproject.toml', 'poetry.lock', 'layer/**', '.github/**', 'docs/**', 'examples/**') }}
+
+      - name: Create integrity hash for build artifact
+        id: integrity
+        run: echo "BUILD_INTEGRITY_HASH=${HASH}" >> "$GITHUB_OUTPUT"
+        env:
+          # paths to hash and why they're important to protect
+          #
+          # dist/  - package distribution build
+          HASH: ${{ hashFiles('dist/**') }}
+
+      - name: Upload build artifact
+        uses: ./.github/actions/upload-artifact
         with:
+          name: build-${{ steps.integrity.outputs.BUILD_INTEGRITY_HASH }}
           path: dist/
-          # NOTE: cache key uses a hash of (Runner OS + Version to be released + Deps)
-          # since a new release might not change a dependency but version
-          # otherwise we might accidentally reuse a previously cached artifact for a newer release.
-          # The reason we don't add pyproject.toml here is to avoid racing conditions
-          # where git checkout might happen too fast and doesn't pick up the latest version
-          # and also future-proof for when we switch to protected branch and update via PR
-          key: ${{ runner.os }}-${{ env.RELEASE_VERSION }}-${{ hashFiles('**/poetry.lock') }}

+  # This job uses the release artifact to publish to PyPi
+  # it exchanges JWT tokens with GitHub to obtain PyPi credentials,
+  # since it's already registered as a Trusted Publisher.
+  # It uses the sealed build artifact (.whl, .tar.gz) to release it
   release:
-    needs: build
+    needs: [build, seal]
     environment: release
-    runs-on: aws-lambda-powertools_ubuntu-latest_4-core
+    runs-on: ubuntu-latest
     permissions:
       id-token: write # OIDC for PyPi Trusted Publisher feature
     env:
-      RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }}
+      RELEASE_VERSION: ${{ needs.seal.outputs.RELEASE_VERSION }}
+      BUILD_INTEGRITY_HASH: ${{ needs.build.outputs.BUILD_INTEGRITY_HASH }}
     steps:
+      # NOTE: we need actions/checkout in order to use our local actions (e.g., ./.github/actions)
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Restore release artifact from cache
-        id: restore-release-build
-        uses: actions/cache/restore@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1
         with:
-          path: dist/
-          key: ${{ runner.os }}-${{ env.RELEASE_VERSION }}-${{ hashFiles('**/poetry.lock') }}
+          ref: ${{ env.RELEASE_COMMIT }}
+
+      - name: Restore sealed build
+        uses: ./.github/actions/download-artifact
+        with:
+          name: build-${{ env.BUILD_INTEGRITY_HASH }}
+          path: .
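+
+      # NOTE (illustrative): to spot-check a downloaded build artifact locally you
+      # could run `sha256sum dist/*`; the tampering check below compares GitHub's
+      # hashFiles('dist/**') value instead, which composes per-file SHA-256 digests,
+      # so the two outputs are not directly interchangeable.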
+
+      - name: Source code tampering check
+        run: test "${BUILD_INTEGRITY_HASH}" = "${CURRENT_HASH}" || exit 1
+        env:
+          CURRENT_HASH: ${{ hashFiles('dist/**') }}

       - name: Upload to PyPi prod
+        if: ${{ !inputs.skip_pypi }}
@@ -128,65 +249,118 @@ jobs:
       # with:
       #   repository-url: https://test.pypi.org/legacy/

+  # We create a Git Tag using our release version (e.g., v2.16.0)
+  # using the sealed source code we created earlier.
+  # Because we bumped the version of our project as part of CI,
+  # we need to add this change into git before pushing the tag,
+  # otherwise the release commit will be used as the basis for the tag.
+  # Later, we create a PR to update trunk with our newest release version (e.g., bump_version job)
   create_tag:
-    needs: [build, release]
+    needs: [release, seal]
     runs-on: ubuntu-latest
     permissions:
       contents: write
     env:
-      RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }}
+      SOURCE_INTEGRITY_HASH: ${{ needs.seal.outputs.SOURCE_CODE_HASH }}
     steps:
+      # NOTE: we need actions/checkout to authenticate and configure git first
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
+        with:
+          ref: ${{ env.RELEASE_COMMIT }}
+
+      - name: Restore sealed source code
+        uses: ./.github/actions/download-artifact
+        with:
+          name: source-${{ env.SOURCE_INTEGRITY_HASH }}
+          path: .
+
+      # NOTE: Ran out of time to create a composite action out of this
+      # because GitHub Action inputs do not support arrays and it became fragile when making it reusable with strings
+      # keeping these hardcoded for now until we have a cleaner way to reuse files/dirs we want to hash
+      - name: Source code tampering check
+        run: test "${SOURCE_INTEGRITY_HASH}" = "${CURRENT_HASH}" || exit 1
+        env:
+          CURRENT_HASH: ${{ hashFiles('aws_lambda_powertools/**', 'pyproject.toml', 'poetry.lock', 'layer/**', '.github/**', 'docs/**', 'examples/**') }}
+
       - id: setup-git
         name: Git client setup and refresh tip
         run: |
           git config user.name "Powertools bot"
           git config user.email "aws-lambda-powertools-feedback@amazon.com"
           git config remote.origin.url >&-
+
       - name: Create Git Tag
         run: |
+          git add pyproject.toml
+          git commit -m "chore: version bump"
           git tag -a v"${RELEASE_VERSION}" -m "release_version: v${RELEASE_VERSION}"
           git push origin v"${RELEASE_VERSION}"
+        env:
+          RELEASE_VERSION: ${{ needs.seal.outputs.RELEASE_VERSION }}

-  # NOTE: Watch out for the depth limit of 4 nested workflow_calls.
-  # publish_layer -> publish_v2_layer -> reusable_deploy_v2_layer_stack
-  publish_layer:
-    needs: [build, release, create_tag]
-    secrets: inherit
-    permissions:
-      id-token: write
-      contents: write
-      pages: write
-      pull-requests: write
-    uses: ./.github/workflows/publish_v2_layer.yml
-    with:
-      latest_published_version: ${{ needs.build.outputs.RELEASE_VERSION }}
-      pre_release: ${{ inputs.pre_release }}
-
+  # Creates a PR with the latest version we've just released
+  # since our trunk is protected against any direct pushes from automation
   bump_version:
-    needs: [build, release]
+    needs: [release, seal]
     permissions:
       contents: write # create-pr action creates a temporary branch
       pull-requests: write # create-pr action creates a PR using the temporary branch
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }}
+      SOURCE_INTEGRITY_HASH: ${{ needs.seal.outputs.SOURCE_CODE_HASH }}
     steps:
+      # NOTE: we need actions/checkout to authenticate and configure git first
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Bump package version
-        id: versioning
-        run: poetry version "${RELEASE_VERSION}"
+        with:
+          ref: ${{ env.RELEASE_COMMIT }}
+
+      - name: Restore sealed source code
+        uses: ./.github/actions/download-artifact
+        with:
+          name: source-${{ env.SOURCE_INTEGRITY_HASH }}
+          path: .
+
+      # NOTE: Ran out of time to create a composite action out of this
+      # because GitHub Action inputs do not support arrays and it became fragile when making it reusable with strings
+      # keeping these hardcoded for now until we have a cleaner way to reuse files/dirs we want to hash
+      - name: Source code tampering check
+        run: test "${SOURCE_INTEGRITY_HASH}" = "${CURRENT_HASH}" || exit 1
+        env:
+          CURRENT_HASH: ${{ hashFiles('aws_lambda_powertools/**', 'pyproject.toml', 'poetry.lock', 'layer/**', '.github/**', 'docs/**', 'examples/**') }}
+
       - name: Create PR
         id: create-pr
         uses: ./.github/actions/create-pr
         with:
           files: "pyproject.toml"
           temp_branch_prefix: "ci-bump"
-          pull_request_title: "chore(ci): bump version to ${{ env.RELEASE_VERSION }}"
+          pull_request_title: "chore(ci): bump version to ${{ needs.seal.outputs.RELEASE_VERSION }}"
           github_token: ${{ secrets.GITHUB_TOKEN }}

+  # This job compiles a Lambda Layer optimized for space and speed (e.g., Cython)
+  # It then deploys to Layer's Beta and Prod accounts, including SAR Beta and Prod accounts.
+  # It uses canaries to attest that Layers can be used and imported between stages.
+  # Lastly, it updates our documentation with the latest Layer ARN for all regions
+  #
+  # NOTE
+  #
+  # Watch out for the depth limit of 4 nested workflow_calls.
+ # publish_layer -> publish_v2_layer -> reusable_deploy_v2_layer_stack + publish_layer: + needs: [seal, release, create_tag] + secrets: inherit + permissions: + id-token: write + contents: write + pages: write + pull-requests: write + uses: ./.github/workflows/publish_v2_layer.yml + with: + latest_published_version: ${{ needs.seal.outputs.RELEASE_VERSION }} + pre_release: ${{ inputs.pre_release }} + post_release: - needs: [build, release, publish_layer] + needs: [seal, release, publish_layer] permissions: contents: read issues: write @@ -194,9 +368,11 @@ jobs: pull-requests: write runs-on: ubuntu-latest env: - RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }} + RELEASE_VERSION: ${{ needs.seal.outputs.RELEASE_VERSION }} steps: - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 + with: + ref: ${{ env.RELEASE_COMMIT }} - name: Close issues related to this release uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: From 61a502821352861b7d82f5389e61a54349edc6e3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 23:14:37 +0100 Subject: [PATCH 56/76] chore(deps): bump pydantic from 1.10.7 to 1.10.8 (#2316) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 74 ++++++++++++++++++++++++++--------------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/poetry.lock b/poetry.lock index facfc231082..5d4aff79afb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2033,48 +2033,48 @@ files = [ [[package]] name = "pydantic" -version = "1.10.7" +version = "1.10.8" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, - {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, - {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, - {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, - {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, - {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, - {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, - {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = 
"sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, - {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, - {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, + {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, + {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, + {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, + {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, + {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, + {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, + {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, + {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, + {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, + {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, + {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, + {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, + {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, + {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, + {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, + {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, + {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, + {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, + {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, + {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, + {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, + {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, + {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, + {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, + {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, + {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, + {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, + {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, + {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, + {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, + {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, + {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, + {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, + {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, + {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, + {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, ] [package.dependencies] From ff5bea8f06b45eae53a59eec5cc13cd49ac1f81c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 23:15:04 +0100 Subject: [PATCH 57/76] chore(deps-dev): bump sentry-sdk from 1.23.1 to 1.24.0 (#2314) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5d4aff79afb..50d17286527 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2657,14 +2657,14 @@ pbr = "*" [[package]] name = "sentry-sdk" -version = "1.23.1" +version = "1.24.0" description = "Python client for Sentry (https://sentry.io)" category = "dev" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.23.1.tar.gz", hash = "sha256:0300fbe7a07b3865b3885929fb863a68ff01f59e3bcfb4e7953d0bf7fd19c67f"}, - {file = "sentry_sdk-1.23.1-py2.py3-none-any.whl", hash = "sha256:a884e2478e0b055776ea2b9234d5de9339b4bae0b3a5e74ae43d131db8ded27e"}, + {file = "sentry-sdk-1.24.0.tar.gz", hash = "sha256:0bbcecda9f51936904c1030e7fef0fe693e633888f02a14d1cb68646a50e83b3"}, + {file = 
"sentry_sdk-1.24.0-py2.py3-none-any.whl", hash = "sha256:56d6d9d194c898d853a7c1dd99bed92ce82334ee1282292c15bcc967ff1a49b5"}, ] [package.dependencies] From 5a4aa44e61155ba58c5884ed67d9fa63731de78a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 23:15:23 +0100 Subject: [PATCH 58/76] chore(deps-dev): bump types-requests from 2.30.0.0 to 2.31.0.0 (#2315) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 50d17286527..1dcf4cbe6d3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2841,14 +2841,14 @@ files = [ [[package]] name = "types-requests" -version = "2.30.0.0" +version = "2.31.0.0" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.30.0.0.tar.gz", hash = "sha256:dec781054324a70ba64430ae9e62e7e9c8e4618c185a5cb3f87a6738251b5a31"}, - {file = "types_requests-2.30.0.0-py3-none-any.whl", hash = "sha256:c6cf08e120ca9f0dc4fa4e32c3f953c3fba222bcc1db6b97695bce8da1ba9864"}, + {file = "types-requests-2.31.0.0.tar.gz", hash = "sha256:c1c29d20ab8d84dff468d7febfe8e0cb0b4664543221b386605e14672b44ea25"}, + {file = "types_requests-2.31.0.0-py3-none-any.whl", hash = "sha256:7c5cea7940f8e92ec560bbc468f65bf684aa3dcf0554a6f8c4710f5f708dc598"}, ] [package.dependencies] @@ -3078,4 +3078,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "313ebc539d7ce406d5c06a3a73fb571a882c7115976c17dc7e18e73a238a26f4" +content-hash = "37442dfae36e56f78e886c158c022c73c1316308ff794e021960408e8abcd1ac" diff --git a/pyproject.toml b/pyproject.toml index 1ac5b04408d..e19c3fdbb71 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ mypy-boto3-secretsmanager = "^1.26.135" mypy-boto3-ssm = "^1.26.97" mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" -types-requests = "^2.30.0" +types-requests = "^2.31.0" typing-extensions = "^4.4.0" mkdocs-material = "^9.1.14" filelock = "^3.12.0" From ab47c9263e875c4e0ac6f554fd05d02929c6bd18 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 24 May 2023 09:48:59 +0100 Subject: [PATCH 59/76] feat(parser): add support for parsing SQS events wrapped in Kinesis Firehose (#2294) --- .../utilities/parser/models/__init__.py | 3 ++ .../parser/models/kinesis_firehose_sqs.py | 29 +++++++++++++++++++ docs/utilities/parser.md | 1 + tests/events/kinesisFirehoseSQSEvent.json | 12 ++++++++ .../parser/test_kinesis_firehose.py | 24 +++++++++++++++ 5 files changed, 69 insertions(+) create mode 100644 aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py create mode 100644 tests/events/kinesisFirehoseSQSEvent.json diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index 5f7a8a6b550..c2385b7bf14 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -42,6 +42,7 @@ KinesisFirehoseRecord, KinesisFirehoseRecordMetadata, ) +from .kinesis_firehose_sqs import KinesisFirehoseSqsModel, KinesisFirehoseSqsRecord from .lambda_function_url import LambdaFunctionUrlModel from .s3 import ( S3EventNotificationEventBridgeDetailModel, @@ -144,4 +145,6 @@ "KafkaRecordModel", "KafkaMskEventModel", "KafkaBaseEventModel", + 
"KinesisFirehoseSqsModel", + "KinesisFirehoseSqsRecord", ] diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py b/aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py new file mode 100644 index 00000000000..b649828853b --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/kinesis_firehose_sqs.py @@ -0,0 +1,29 @@ +import json +from typing import List, Optional + +from pydantic import BaseModel, PositiveInt, validator + +from aws_lambda_powertools.shared.functions import base64_decode +from aws_lambda_powertools.utilities.parser.models import KinesisFirehoseRecordMetadata + +from .sqs import SqsRecordModel + + +class KinesisFirehoseSqsRecord(BaseModel): + data: SqsRecordModel + recordId: str + approximateArrivalTimestamp: PositiveInt + kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata] + + @validator("data", pre=True, allow_reuse=True) + def data_base64_decode(cls, value): + # Firehose payload is encoded + return json.loads(base64_decode(value)) + + +class KinesisFirehoseSqsModel(BaseModel): + invocationId: str + deliveryStreamArn: str + region: str + sourceKinesisStreamArn: Optional[str] + records: List[KinesisFirehoseSqsRecord] diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 38e12c0792d..6607e7b07b0 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -168,6 +168,7 @@ Parser comes with the following built-in models: | **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload | | **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams | | **KinesisFirehoseModel** | Lambda Event Source payload for Amazon Kinesis Firehose | +| **KinesisFirehoseSqsModel** | Lambda Event Source payload for SQS messages wrapped in Kinesis Firehose records | | **LambdaFunctionUrlModel** | Lambda Event Source payload for Lambda Function URL payload | | **S3EventNotificationEventBridgeModel** | Lambda Event Source payload for Amazon S3 Event Notification to EventBridge. 
| | **S3Model** | Lambda Event Source payload for Amazon S3 | diff --git a/tests/events/kinesisFirehoseSQSEvent.json b/tests/events/kinesisFirehoseSQSEvent.json new file mode 100644 index 00000000000..bea267c4206 --- /dev/null +++ b/tests/events/kinesisFirehoseSQSEvent.json @@ -0,0 +1,12 @@ +{ + "invocationId": "556b67a3-48fc-4385-af49-e133aade9cb9", + "deliveryStreamArn": "arn:aws:firehose:us-east-1:123456789012:deliverystream/PUT-S3-tdyyE", + "region": "us-east-1", + "records": [ + { + "recordId": "49640912821178817833517986466168945147170627572855734274000000", + "approximateArrivalTimestamp": 1684864917398, + "data": "eyJtZXNzYWdlSWQiOiI1YWI4MDdkNC01NjQ0LTRjNTUtOTdhMy00NzM5NjYzNWFjNzQiLCJyZWNlaXB0SGFuZGxlIjoiQVFFQndKbkt5ckhpZ1VNWmo2cllpZ0NneGxhUzNTTHkwYS4uLiIsImJvZHkiOiJUZXN0IG1lc3NhZ2UuIiwiYXR0cmlidXRlcyI6eyJBcHByb3hpbWF0ZVJlY2VpdmVDb3VudCI6IjEiLCJTZW50VGltZXN0YW1wIjoiMTY4NDg2NDg1MjQ5MSIsIlNlbmRlcklkIjoiQUlEQUlFTlFaSk9MTzIzWVZKNFZPIiwiQXBwcm94aW1hdGVGaXJzdFJlY2VpdmVUaW1lc3RhbXAiOiIxNjg0ODY0ODcyNDkxIn0sIm1lc3NhZ2VBdHRyaWJ1dGVzIjp7fSwibWQ1T2ZNZXNzYWdlQXR0cmlidXRlcyI6bnVsbCwibWQ1T2ZCb2R5IjoiYzhiNmJjNjBjOGI4YjNhOTA0ZTQ1YzFmYWJkZjUyM2QiLCJldmVudFNvdXJjZSI6ImF3czpzcXMiLCJldmVudFNvdXJjZUFSTiI6ImFybjphd3M6c3FzOnVzLWVhc3QtMToyMDA5ODQxMTIzODY6U05TIiwiYXdzUmVnaW9uIjoidXMtZWFzdC0xIn0K" + } + ] +} diff --git a/tests/functional/parser/test_kinesis_firehose.py b/tests/functional/parser/test_kinesis_firehose.py index 59bbd2f4e18..c0b71f80540 100644 --- a/tests/functional/parser/test_kinesis_firehose.py +++ b/tests/functional/parser/test_kinesis_firehose.py @@ -11,6 +11,8 @@ KinesisFirehoseModel, KinesisFirehoseRecord, KinesisFirehoseRecordMetadata, + KinesisFirehoseSqsModel, + KinesisFirehoseSqsRecord, ) from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.parser.schemas import MyKinesisFirehoseBusiness @@ -77,6 +79,28 @@ def handle_firehose_no_envelope_put(event: KinesisFirehoseModel, _: LambdaContex assert record_02.data == b'{"Hello": "World"}' +@event_parser(model=KinesisFirehoseSqsModel) +def handle_firehose_sqs_wrapped_message(event: KinesisFirehoseSqsModel, _: LambdaContext): + assert event.region == "us-east-1" + assert event.invocationId == "556b67a3-48fc-4385-af49-e133aade9cb9" + assert event.deliveryStreamArn == "arn:aws:firehose:us-east-1:123456789012:deliverystream/PUT-S3-tdyyE" + + records = list(event.records) + assert len(records) == 1 + + record_01: KinesisFirehoseSqsRecord = records[0] + assert record_01.data.messageId == "5ab807d4-5644-4c55-97a3-47396635ac74" + assert record_01.data.receiptHandle == "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..." + assert record_01.data.body == "Test message." 
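+    # NOTE: `data` arrives base64-encoded in the Firehose record; the model's
+    # validator decodes and JSON-parses it into an SqsRecordModel before these
+    # field-level assertions run.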
+ assert record_01.data.attributes.ApproximateReceiveCount == "1" + assert record_01.data.attributes.SenderId == "AIDAIENQZJOLO23YVJ4VO" + + +def test_firehose_sqs_wrapped_message_event(): + event_dict = load_event("kinesisFirehoseSQSEvent.json") + handle_firehose_sqs_wrapped_message(event_dict, LambdaContext()) + + def test_firehose_trigger_event(): event_dict = load_event("kinesisFirehoseKinesisEvent.json") event_dict["records"].pop(0) # remove first item since the payload is bytes and we want to test payload json class From 55738b9a86246461c03b5cf797011e537e849860 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 24 May 2023 10:27:04 +0100 Subject: [PATCH 60/76] fix(logger): add setLevel function to set level programmatically (#2320) --- aws_lambda_powertools/logging/logger.py | 19 +++++++++++++++++-- tests/functional/test_logger.py | 17 +++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index f9c33c70ba6..88ba8122da9 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -91,7 +91,9 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init] service : str, optional service name to be appended in logs, by default "service_undefined" level : str, int optional - logging.level, by default "INFO" + The level to set. Can be a string representing the level name: 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL' + or an integer representing the level value: 10 for 'DEBUG', 20 for 'INFO', 30 for 'WARNING', 40 for 'ERROR', 50 for 'CRITICAL'. # noqa: E501 + by default "INFO" child: bool, optional create a child Logger named ., False by default sample_rate: float, optional @@ -327,7 +329,7 @@ def _configure_sampling(self): try: if self.sampling_rate and random.random() <= float(self.sampling_rate): logger.debug("Setting log level to Debug due to sampling rate") - self.log_level = logging.DEBUG + self.setLevel(logging.DEBUG) except ValueError: raise InvalidLoggerSamplingRateError( f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead." @@ -443,6 +445,19 @@ def decorate(event, context, *args, **kwargs): return decorate + def setLevel(self, level: Union[str, int]): + """ + Set the logging level for the logger. + + Parameters: + ----------- + level str | int + The level to set. Can be a string representing the level name: 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL' + or an integer representing the level value: 10 for 'DEBUG', 20 for 'INFO', 30 for 'WARNING', 40 for 'ERROR', 50 for 'CRITICAL'. 
# noqa: E501
+        """
+        self.log_level = level
+        self._logger.setLevel(level)
+
     def info(
         self,
         msg: object,
diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py
index 412a9358553..e6dbf7ebbb8 100644
--- a/tests/functional/test_logger.py
+++ b/tests/functional/test_logger.py
@@ -379,6 +379,23 @@ def test_logger_level_env_var_as_int(monkeypatch, service_name):
     Logger(service=service_name)


+def test_logger_switch_between_levels(stdout, service_name):
+    # GIVEN a Logger is initialized with INFO level
+    logger = Logger(service=service_name, level="INFO", stream=stdout)
+    logger.info("message info")
+
+    # WHEN we switch to DEBUG level
+    logger.setLevel(level="DEBUG")
+    logger.debug("message debug")
+
+    # THEN we must have different levels and messages in stdout
+    log_output = capture_multiple_logging_statements_output(stdout)
+    assert log_output[0]["level"] == "INFO"
+    assert log_output[0]["message"] == "message info"
+    assert log_output[1]["level"] == "DEBUG"
+    assert log_output[1]["message"] == "message debug"
+
+
 def test_logger_record_caller_location(stdout, service_name):
     # GIVEN Logger is initialized
     logger = Logger(service=service_name, stream=stdout)

From a065f52440da28752192764183231db991cfd9b3 Mon Sep 17 00:00:00 2001
From: Simon Thulbourn
Date: Wed, 24 May 2023 15:59:04 +0200
Subject: [PATCH 61/76] fix(docs): use concrete secrets from settings (#2322)

* fix(docs): use concrete secrets from settings

* pass environment maybe

* remove input

* add actual env name

Signed-off-by: Simon Thulbourn

---------

Signed-off-by: Simon Thulbourn
---
 .github/workflows/on_push_docs.yml          | 3 ++-
 .github/workflows/publish_v2_layer.yml      | 1 +
 .github/workflows/rebuild_latest_docs.yml   | 1 +
 .github/workflows/reusable_publish_docs.yml | 2 +-
 4 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/on_push_docs.yml b/.github/workflows/on_push_docs.yml
index e257f49e808..b6051f8fa94 100644
--- a/.github/workflows/on_push_docs.yml
+++ b/.github/workflows/on_push_docs.yml
@@ -19,7 +19,8 @@ jobs:
       contents: write
       pages: write
       id-token: write
+    secrets: inherit
     uses: ./.github/workflows/reusable_publish_docs.yml
     with:
       version: develop
-      alias: stage
+      alias: stage
\ No newline at end of file
diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml
index 5447d0a51e5..0046a6bb84d 100644
--- a/.github/workflows/publish_v2_layer.yml
+++ b/.github/workflows/publish_v2_layer.yml
@@ -224,6 +224,7 @@ jobs:
       pages: write
       pull-requests: none
       id-token: write
+    secrets: inherit
     uses: ./.github/workflows/reusable_publish_docs.yml
     with:
       version: ${{ inputs.latest_published_version }}
diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml
index deac728ac25..fc65bd71673 100644
--- a/.github/workflows/rebuild_latest_docs.yml
+++ b/.github/workflows/rebuild_latest_docs.yml
@@ -23,6 +23,7 @@ jobs:
       contents: write
       pages: write
       id-token: write
+    secrets: inherit
     uses: ./.github/workflows/reusable_publish_docs.yml
     with:
       version: ${{ inputs.latest_published_version }}
diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml
index f624d327ee5..1ebe58ebf31 100644
--- a/.github/workflows/reusable_publish_docs.yml
+++ b/.github/workflows/reusable_publish_docs.yml
@@ -37,7 +37,7 @@ jobs:
     concurrency:
       group: on-docs-rebuild
     runs-on: ubuntu-latest
-    environment: Docs
+    environment: "Docs"
     steps:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
         with:
From 428d104eb5aca82e257b3c09838adf6f75539734 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 May 2023 22:33:22 +0100 Subject: [PATCH 62/76] chore(deps): bump actions/setup-python from 4.6.0 to 4.6.1 (#2325) --- .github/workflows/publish_v2_layer.yml | 2 +- .github/workflows/python_build.yml | 2 +- .github/workflows/release.yml | 4 ++-- .github/workflows/reusable_deploy_v2_layer_stack.yml | 2 +- .github/workflows/reusable_publish_docs.yml | 2 +- .github/workflows/run-e2e-tests.yml | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml index 0046a6bb84d..296ea1376c7 100644 --- a/.github/workflows/publish_v2_layer.yml +++ b/.github/workflows/publish_v2_layer.yml @@ -47,7 +47,7 @@ jobs: with: node-version: "16.12" - name: Setup python - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: "3.10" cache: "pip" diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 31769f11b17..0648fc30d17 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -36,7 +36,7 @@ jobs: - name: Install poetry run: pipx install poetry - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ matrix.python-version }} cache: "poetry" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d5f22affe18..69f2ec4e370 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -139,7 +139,7 @@ jobs: - name: Install poetry run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0 - name: Set up Python - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: "3.10" cache: "poetry" @@ -177,7 +177,7 @@ jobs: - name: Install poetry run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0 - name: Set up Python - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: "3.10" cache: "poetry" diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml index b1113ff3e40..58e4a26f75f 100644 --- a/.github/workflows/reusable_deploy_v2_layer_stack.yml +++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml @@ -108,7 +108,7 @@ jobs: with: node-version: "16.12" - name: Setup python - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: "3.10" cache: "pip" diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml index 1ebe58ebf31..fdc87524fac 100644 --- a/.github/workflows/reusable_publish_docs.yml +++ b/.github/workflows/reusable_publish_docs.yml @@ -46,7 +46,7 @@ jobs: - name: Install poetry run: pipx install poetry - name: Set up Python - uses: 
actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: "3.10" cache: "poetry" diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index e3305114555..2b4f7c05459 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -38,7 +38,7 @@ jobs: - name: Install poetry run: pipx install poetry - name: "Use Python" - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ matrix.version }} architecture: "x64" From 6e708b1852d360a76d8769c6e4cc2d2f5c21e4b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 May 2023 22:33:47 +0100 Subject: [PATCH 63/76] chore(deps-dev): bump pytest-cov from 4.0.0 to 4.1.0 (#2327) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1dcf4cbe6d3..00f1f2243b5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2254,14 +2254,14 @@ histogram = ["pygal", "pygaljs"] [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -3078,4 +3078,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "37442dfae36e56f78e886c158c022c73c1316308ff794e021960408e8abcd1ac" +content-hash = "65c883e1b8122086b32987793339371ecaa515e920b5ad3d1d48cfffc89aab76" diff --git a/pyproject.toml b/pyproject.toml index e19c3fdbb71..77299c012b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,7 +51,7 @@ flake8-fixme = "^1.1.1" flake8-variables-names = "^0.0.5" flake8-black = "^0.3.6" isort = "^5.11.5" -pytest-cov = "^4.0.0" +pytest-cov = "^4.1.0" pytest-mock = "^3.5.1" pdoc3 = "^0.10.0" pytest-asyncio = "^0.21.0" From cb55707942e71c3e45d2a7d451e3c81cba684f9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 May 2023 22:34:14 +0100 Subject: [PATCH 64/76] chore(deps-dev): bump coverage from 7.2.5 to 7.2.6 (#2326) --- poetry.lock | 104 ++++++++++++++++++++++++++-------------------------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index 00f1f2243b5..627e7cd12eb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -536,63 +536,63 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "coverage" -version = "7.2.5" +version = "7.2.6" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = 
"coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = 
"coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, + {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, + {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = "sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, + {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, + {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, + {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, + {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, + {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, + {file = 
"coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, + {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = "sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, + {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, + {file = "coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, + {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, + {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, + {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, + {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, + {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, + {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, + {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, + 
{file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, + {file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, + {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, + {file = "coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, + {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, ] [package.dependencies] From 7e0be2c83e647808ec160cfa0485d25da37a5047 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 22:06:28 +0100 Subject: [PATCH 65/76] chore(deps-dev): bump aws-cdk from 2.80.0 to 2.81.0 (#2332) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 14 +++++++------- package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/package-lock.json b/package-lock.json index f4e9f751102..68a7e78f22c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,13 +8,13 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.80.0" + "aws-cdk": "^2.81.0" } }, "node_modules/aws-cdk": { - "version": "2.80.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.80.0.tgz", - "integrity": "sha512-SKMZ/sGlNmFV37Lk40HHe4QJ2hJZmD0PrkScBmkr33xzEqjyKhN3jIHC4PYqTUeUK/qYemq3Y5OpXKQuWTCoKA==", + "version": "2.81.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.81.0.tgz", + "integrity": "sha512-EEwacXaauxHmVBLQzbFDOcjJOAZw57vzUQDJ7eDl3MIDSrKG2dZ1XYHVuMbSloqJpgDW6xZ9vAZ45rXTTjdSzw==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -43,9 +43,9 @@ }, "dependencies": { "aws-cdk": { - "version": "2.80.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.80.0.tgz", - "integrity": "sha512-SKMZ/sGlNmFV37Lk40HHe4QJ2hJZmD0PrkScBmkr33xzEqjyKhN3jIHC4PYqTUeUK/qYemq3Y5OpXKQuWTCoKA==", + "version": "2.81.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.81.0.tgz", + "integrity": "sha512-EEwacXaauxHmVBLQzbFDOcjJOAZw57vzUQDJ7eDl3MIDSrKG2dZ1XYHVuMbSloqJpgDW6xZ9vAZ45rXTTjdSzw==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index 8b8063b4000..dd312dfab95 100644 --- 
a/package.json +++ b/package.json @@ -2,6 +2,6 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.80.0" + "aws-cdk": "^2.81.0" } } From abf8dec0498079f9f39e9a9e4cdebe680c027f55 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 May 2023 02:59:22 +0100 Subject: [PATCH 66/76] chore(deps-dev): bump mkdocs-material from 9.1.14 to 9.1.15 (#2337) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 627e7cd12eb..24c7207a872 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1574,14 +1574,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.14" +version = "9.1.15" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.14-py3-none-any.whl", hash = "sha256:b56a9f955ed32d38333715cbbf68ce38f683bf38610c65094fa4ef2db9f08bcd"}, - {file = "mkdocs_material-9.1.14.tar.gz", hash = "sha256:1ae74cc5464ef2f64574d4884512efed7f4db386fb9bc6af20fd427d7a702f49"}, + {file = "mkdocs_material-9.1.15-py3-none-any.whl", hash = "sha256:b49e12869ab464558e2dd3c5792da5b748a7e0c48ee83b4d05715f98125a7a39"}, + {file = "mkdocs_material-9.1.15.tar.gz", hash = "sha256:8513ab847c9a541ed3d11a3a7eed556caf72991ee786c31c5aac6691a121088a"}, ] [package.dependencies] @@ -3078,4 +3078,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "65c883e1b8122086b32987793339371ecaa515e920b5ad3d1d48cfffc89aab76" +content-hash = "f916179bcebcb9eb8b8e787550f39b750aa673ff28c511041048fd939d44eef2" diff --git a/pyproject.toml b/pyproject.toml index 77299c012b6..428dc6ab96b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" types-requests = "^2.31.0" typing-extensions = "^4.4.0" -mkdocs-material = "^9.1.14" +mkdocs-material = "^9.1.15" filelock = "^3.12.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.70" From 93a8bb17137f8ebd20c12f986e0d1c6ad8e4dfe4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 May 2023 02:59:39 +0100 Subject: [PATCH 67/76] chore(deps-dev): bump coverage from 7.2.6 to 7.2.7 (#2338) --- poetry.lock | 113 ++++++++++++++++++++++++++++------------------------ 1 file changed, 61 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index 24c7207a872..22fde8c4998 100644 --- a/poetry.lock +++ b/poetry.lock @@ -536,63 +536,72 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "coverage" -version = "7.2.6" +version = "7.2.7" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, - {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, - 
{file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, - {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = "sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, - {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, - {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = "sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, - {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, - {file = "coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, - {file = 
"coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, - {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, - {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, - {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, - {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, - {file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, - {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, - {file = 
"coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, - {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] From f00eeabb5f377bc42eda6deb10ad435520bef365 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 May 2023 02:59:57 +0100 Subject: [PATCH 68/76] chore(deps-dev): bump types-requests from 2.31.0.0 to 2.31.0.1 (#2339) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 22fde8c4998..a58279dc584 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2850,14 +2850,14 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.0" +version = "2.31.0.1" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.0.tar.gz", hash = 
"sha256:c1c29d20ab8d84dff468d7febfe8e0cb0b4664543221b386605e14672b44ea25"}, - {file = "types_requests-2.31.0.0-py3-none-any.whl", hash = "sha256:7c5cea7940f8e92ec560bbc468f65bf684aa3dcf0554a6f8c4710f5f708dc598"}, + {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"}, + {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"}, ] [package.dependencies] From c6fb0b48cec1a179bb5bb1ce03c86475ddbb8088 Mon Sep 17 00:00:00 2001 From: Simon Thulbourn Date: Wed, 31 May 2023 09:56:45 +0200 Subject: [PATCH 69/76] docs(project): rename project to Powertools for AWS Lambda (Python) (#2313) Co-authored-by: Leandro Damascena --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- .github/ISSUE_TEMPLATE/feature_request.yml | 8 +-- .github/ISSUE_TEMPLATE/maintenance.yml | 4 +- .github/ISSUE_TEMPLATE/rfc.yml | 6 +- .github/ISSUE_TEMPLATE/share_your_work.yml | 8 +-- .github/ISSUE_TEMPLATE/static_typing.yml | 2 +- .github/ISSUE_TEMPLATE/support_powertools.yml | 12 ++-- .github/ISSUE_TEMPLATE/tech_debt.yml | 4 +- .github/actions/create-pr/action.yml | 4 +- .github/boring-cyborg.yml | 4 +- .github/workflows/release.yml | 2 +- CHANGELOG.md | 10 ++-- MAINTAINERS.md | 4 +- README.md | 16 ++--- aws_lambda_powertools/logging/formatter.py | 2 +- aws_lambda_powertools/logging/logger.py | 14 ++--- aws_lambda_powertools/logging/utils.py | 4 +- aws_lambda_powertools/package_logger.py | 2 +- .../utilities/validation/exceptions.py | 2 +- benchmark/README.md | 2 +- docs/core/event_handler/api_gateway.md | 2 +- docs/core/event_handler/appsync.md | 2 +- docs/core/logger.md | 12 ++-- docs/core/tracer.md | 2 +- docs/index.md | 40 ++++++------- docs/roadmap.md | 2 +- docs/tutorial/index.md | 34 +++++------ docs/upgrade.md | 6 +- docs/utilities/feature_flags.md | 4 +- docs/utilities/idempotency.md | 4 +- docs/utilities/jmespath_functions.md | 8 +-- docs/utilities/middleware_factory.md | 4 +- docs/utilities/parser.md | 2 +- docs/utilities/validation.md | 2 +- docs/we_made_this.md | 6 +- .../event_handler_graphql/sam/template.yaml | 2 +- examples/logger/sam/template.yaml | 2 +- examples/metrics/sam/template.yaml | 2 +- examples/tracer/sam/template.yaml | 2 +- layer/README.md | 12 ++-- layer/app.py | 2 +- layer/layer/canary/app.py | 8 +-- layer/pyproject.toml | 2 +- layer/scripts/layer-balancer/main.go | 4 +- mkdocs.yml | 4 +- pyproject.toml | 4 +- .../idempotency/test_idempotency_dynamodb.py | 10 ++-- tests/e2e/parameters/infrastructure.py | 4 +- tests/e2e/utils/data_fetcher/logs.py | 2 +- .../idempotency/test_idempotency.py | 2 +- tests/functional/test_logger.py | 2 +- tests/functional/test_logger_utils.py | 60 ++++++++++--------- 52 files changed, 184 insertions(+), 182 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index f9fa23ae952..43e6d32030b 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -48,7 +48,7 @@ body: - type: input id: version attributes: - label: AWS Lambda Powertools for Python version + label: Powertools for AWS Lambda (Python) version placeholder: "latest, 1.25.6" value: latest validations: diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index 53e362cc779..b0a5c687e18 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -1,12 +1,12 @@ name: Feature request 
-description: Suggest an idea for Lambda Powertools +description: Suggest an idea for Powertools for AWS Lambda (Python) title: "Feature request: TITLE" labels: ["feature-request", "triage"] body: - type: markdown attributes: value: | - Thank you for taking the time to suggest an idea to the Lambda Powertools project. + Thank you for taking the time to suggest an idea to the Powertools for AWS Lambda (Python) project. *Future readers*: Please react with 👍 and your use case to help us understand customer demand. - type: textarea @@ -36,9 +36,9 @@ body: attributes: label: Acknowledgment options: - - label: This feature request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) + - label: This feature request meets [Powertools for AWS Lambda (Python) Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) + - label: Should this be considered in other Powertools for AWS Lambda languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/maintenance.yml b/.github/ISSUE_TEMPLATE/maintenance.yml index 9f9798c8aed..2d9f7a32b07 100644 --- a/.github/ISSUE_TEMPLATE/maintenance.yml +++ b/.github/ISSUE_TEMPLATE/maintenance.yml @@ -53,9 +53,9 @@ body: attributes: label: Acknowledgment options: - - label: This request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) + - label: This request meets [Powertools for AWS Lambda (Python) Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) + - label: Should this be considered in other Powertools for AWS Lambda languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/rfc.yml b/.github/ISSUE_TEMPLATE/rfc.yml index 457ae558bc6..95108a58dbb 100644 --- a/.github/ISSUE_TEMPLATE/rfc.yml +++ b/.github/ISSUE_TEMPLATE/rfc.yml @@ -15,7 +15,7 @@ body: - type: dropdown id: area attributes: - label: Which AWS Lambda Powertools utility does this relate to? + label: Which Powertools for AWS Lambda (Python) utility does this relate to? 
options: - Tracer - Logger @@ -91,9 +91,9 @@ body: attributes: label: Acknowledgment options: - - label: This feature request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) + - label: This feature request meets [Powertools for AWS Lambda (Python) Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) + - label: Should this be considered in other Powertools for AWS Lambda languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/share_your_work.yml b/.github/ISSUE_TEMPLATE/share_your_work.yml index 974aec87b06..e4e4ed601be 100644 --- a/.github/ISSUE_TEMPLATE/share_your_work.yml +++ b/.github/ISSUE_TEMPLATE/share_your_work.yml @@ -1,5 +1,5 @@ name: I Made This (showcase your work) -description: Share what you did with Powertools 💞💞. Blog post, workshops, presentation, sample apps, etc. +description: Share what you did with Powertools for AWS Lambda (Python) 💞💞. Blog post, workshops, presentation, sample apps, etc. title: "[I Made This]: " labels: ["community-content"] body: @@ -13,7 +13,7 @@ body: description: | Please share the original link to your material. - *Note: Short links will be expanded when added to Powertools documentation* + *Note: Short links will be expanded when added to Powertools for AWS Lambda (Python) documentation* validations: required: true - type: textarea @@ -44,7 +44,7 @@ body: description: | Any notes you might want to share with us related to this material. - *Note: These notes are explicitly to Powertools maintainers. It will not be added to the community resources page.* + *Note: These notes are explicitly to Powertools for AWS Lambda (Python) maintainers. 
It will not be added to the community resources page.* validations: required: false - type: checkboxes @@ -52,5 +52,5 @@ body: attributes: label: Acknowledgment options: - - label: I understand this content may be removed from Powertools documentation if it doesn't conform with the [Code of Conduct](https://aws.github.io/code-of-conduct) + - label: I understand this content may be removed from Powertools for AWS Lambda (Python) documentation if it doesn't conform with the [Code of Conduct](https://aws.github.io/code-of-conduct) required: true diff --git a/.github/ISSUE_TEMPLATE/static_typing.yml b/.github/ISSUE_TEMPLATE/static_typing.yml index 60f216d3b92..42168abaa1f 100644 --- a/.github/ISSUE_TEMPLATE/static_typing.yml +++ b/.github/ISSUE_TEMPLATE/static_typing.yml @@ -34,7 +34,7 @@ body: - type: input id: version attributes: - label: AWS Lambda Powertools for Python version + label: Powertools for AWS Lambda (Python) version placeholder: "latest, 1.25.6" value: latest validations: diff --git a/.github/ISSUE_TEMPLATE/support_powertools.yml b/.github/ISSUE_TEMPLATE/support_powertools.yml index e03b1627044..c2c5f33676c 100644 --- a/.github/ISSUE_TEMPLATE/support_powertools.yml +++ b/.github/ISSUE_TEMPLATE/support_powertools.yml @@ -1,6 +1,6 @@ -name: Support Lambda Powertools (become a reference) -description: Add your organization's name or logo to the Lambda Powertools documentation -title: "[Support Lambda Powertools]: <your organization name>" +name: Support Powertools for AWS Lambda (Python) (become a reference) +description: Add your organization's name or logo to the Powertools for AWS Lambda (Python) documentation +title: "[Support Powertools for AWS Lambda (Python)]: <your organization name>" labels: ["customer-reference"] body: - type: markdown @@ -42,13 +42,13 @@ body: id: use_case attributes: label: (Optional) Use case - description: How are you using Lambda Powertools today? *features, etc.* + description: How are you using Powertools for AWS Lambda (Python) today? *features, etc.* validations: required: false - type: checkboxes id: other_languages attributes: - label: Also using other Lambda Powertools languages? + label: Also using other Powertools for AWS Lambda languages? options: - label: Java required: false @@ -59,6 +59,6 @@ body: - type: markdown attributes: value: | - *By raising a Support Lambda Powertools issue, you are granting AWS permission to use your company's name (and/or logo) for the limited purpose described here. You are also confirming that you have authority to grant such permission.* + *By raising a Support Powertools for AWS Lambda (Python) issue, you are granting AWS permission to use your company's name (and/or logo) for the limited purpose described here. You are also confirming that you have authority to grant such permission.* *You can opt-out at any time by commenting or reopening this issue.* diff --git a/.github/ISSUE_TEMPLATE/tech_debt.yml b/.github/ISSUE_TEMPLATE/tech_debt.yml index f2933cb4ce8..bca5dd25c8a 100644 --- a/.github/ISSUE_TEMPLATE/tech_debt.yml +++ b/.github/ISSUE_TEMPLATE/tech_debt.yml @@ -50,9 +50,9 @@ body: attributes: label: Acknowledgment options: - - label: This request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) + - label: This request meets [Powertools for AWS Lambda (Python) Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. 
[Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) + - label: Should this be considered in other Powertools for AWS Lambda languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/actions/create-pr/action.yml b/.github/actions/create-pr/action.yml index b7713a6c785..dcf2df738bd 100644 --- a/.github/actions/create-pr/action.yml +++ b/.github/actions/create-pr/action.yml @@ -3,7 +3,7 @@ description: "Create a PR and a temporary branch, close duplicates" # PROCESS # -# 1. Setup git client using Powertools bot username +# 1. Setup git client using Powertools for AWS Lambda (Python) bot username # 2. Pushes staged files to a temporary branch # 3. Creates a PR from temporary branch against a target branch (typically trunk: develop, main, etc.) # 4. Searches for duplicate PRs with the same title @@ -63,7 +63,7 @@ runs: - id: setup-git name: Git client setup and refresh tip run: | - git config user.name "Powertools bot" + git config user.name "Powertools for AWS Lambda (Python) bot" git config user.email "aws-lambda-powertools-feedback@amazon.com" git config pull.rebase true git config remote.origin.url >&- diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 7764a1d73d7..006db19c585 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -97,7 +97,7 @@ labelPRBasedOnFilePath: firstPRWelcomeComment: > Thanks a lot for your first contribution! Please check out our contributing guidelines and don't hesitate to ask whatever you need. - In the meantime, check out the #python channel on our AWS Lambda Powertools Discord: [Invite link](https://discord.gg/B8zZKbbyET) + In the meantime, check out the #python channel on our Powertools for AWS Lambda Discord: [Invite link](https://discord.gg/B8zZKbbyET) # Comment to be posted to congratulate user on their first merged PR firstPRMergeComment: > @@ -107,7 +107,7 @@ firstPRMergeComment: > firstIssueWelcomeComment: > Thanks for opening your first issue here! We'll come back to you as soon as we can. - In the meantime, check out the #python channel on our AWS Lambda Powertools Discord: [Invite link](https://discord.gg/B8zZKbbyET) + In the meantime, check out the #python channel on our Powertools for AWS Lambda Discord: [Invite link](https://discord.gg/B8zZKbbyET) ###### IssueLink Adder ################################################################################################# # Insert Issue (Jira/Github etc) link in PR description based on the Issue ID in PR title. 
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 69f2ec4e370..70893998270 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -285,7 +285,7 @@ jobs: - id: setup-git name: Git client setup and refresh tip run: | - git config user.name "Powertools bot" + git config user.name "Powertools for AWS Lambda (Python) bot" git config user.email "aws-lambda-powertools-feedback@amazon.com" git config remote.origin.url >&- diff --git a/CHANGELOG.md b/CHANGELOG.md index 4839a50912a..5e40d717474 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -80,7 +80,7 @@ * **deps-dev:** bump cfn-lint from 0.77.2 to 0.77.3 ([#2165](https://github.com/awslabs/aws-lambda-powertools-python/issues/2165)) * **deps-dev:** bump mkdocs-material from 9.1.6 to 9.1.8 ([#2162](https://github.com/awslabs/aws-lambda-powertools-python/issues/2162)) * **deps-dev:** bump coverage from 7.2.3 to 7.2.4 ([#2179](https://github.com/awslabs/aws-lambda-powertools-python/issues/2179)) -* **governance:** add Lambda Powertools for .NET in issue templates ([#2196](https://github.com/awslabs/aws-lambda-powertools-python/issues/2196)) +* **governance:** add Powertools for AWS Lambda (.NET) in issue templates ([#2196](https://github.com/awslabs/aws-lambda-powertools-python/issues/2196)) <a name="v2.14.1"></a> @@ -368,7 +368,7 @@ ## Documentation * **event_handlers:** Fix REST API - HTTP Methods documentation ([#1936](https://github.com/awslabs/aws-lambda-powertools-python/issues/1936)) -* **home:** update powertools definition +* **home:** update Powertools for AWS Lambda (Python) definition * **we-made-this:** add CI/CD using Feature Flags video ([#1940](https://github.com/awslabs/aws-lambda-powertools-python/issues/1940)) * **we-made-this:** add Feature Flags post ([#1939](https://github.com/awslabs/aws-lambda-powertools-python/issues/1939)) @@ -1249,7 +1249,7 @@ * **event-handler:** snippets split, improved, and lint ([#1279](https://github.com/awslabs/aws-lambda-powertools-python/issues/1279)) * **graphql:** snippets split, improved, and lint ([#1287](https://github.com/awslabs/aws-lambda-powertools-python/issues/1287)) -* **homepage:** emphasize additional powertools languages ([#1292](https://github.com/awslabs/aws-lambda-powertools-python/issues/1292)) +* **homepage:** emphasize additional Powertools for AWS Lambda languages ([#1292](https://github.com/awslabs/aws-lambda-powertools-python/issues/1292)) * **metrics:** snippets split, improved, and lint ## Maintenance @@ -1720,7 +1720,7 @@ ## Features * **ci:** auto-notify & close issues on release -* **logger:** clone powertools logger config to any Python logger ([#927](https://github.com/awslabs/aws-lambda-powertools-python/issues/927)) +* **logger:** clone Powertools for AWS Lambda (Python) logger config to any Python logger ([#927](https://github.com/awslabs/aws-lambda-powertools-python/issues/927)) ## Maintenance @@ -1936,7 +1936,7 @@ ## Features -* expose jmespath powertools functions ([#736](https://github.com/awslabs/aws-lambda-powertools-python/issues/736)) +* expose jmespath Powertools for AWS Lambda (Python) functions ([#736](https://github.com/awslabs/aws-lambda-powertools-python/issues/736)) * add get_raw_configuration property in store; expose store * boto3 sessions in batch, parameters & idempotency ([#717](https://github.com/awslabs/aws-lambda-powertools-python/issues/717)) * **feature-flags:** Bring your own logger for debug ([#709](https://github.com/awslabs/aws-lambda-powertools-python/issues/709)) diff --git 
a/MAINTAINERS.md b/MAINTAINERS.md index 3525147f68f..114ac52bf1e 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -96,7 +96,7 @@ These are the most common labels used by maintainers to triage issues, pull requ | size/XL | PRs between 500-999 LOC, often PRs that grown with feedback | PR automation | | size/XXL | PRs with 1K+ LOC, largely documentation related | PR automation | | tests | PRs that add or change tests | PR automation | -| `<utility>` | PRs related to a Powertools utility, e.g. `parameters`, `tracer` | PR automation | +| `<utility>` | PRs related to a Powertools for AWS Lambda (Python) utility, e.g. `parameters`, `tracer` | PR automation | | feature | New features or minor changes | PR/Release automation | | dependencies | Changes that touch dependencies, e.g. Dependabot, etc. | PR/ automation | | github-actions | Changes in GitHub workflows | PR automation | @@ -231,7 +231,7 @@ To run locally, you need [AWS CDK CLI](https://docs.aws.amazon.com/cdk/v2/guide/ ### Releasing a documentation hotfix -You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available. +You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools for AWS Lambda (Python) version available. This workflow will update both user guide and API documentation. diff --git a/README.md b/README.md index ee025f52261..9aa79dc3f2c 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,11 @@ <!-- markdownlint-disable MD013 MD041 MD043 --> -# AWS Lambda Powertools for Python +# Powertools for AWS Lambda (Python) [![Build](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/python_build.yml/badge.svg)](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/python_build.yml) [![codecov.io](https://codecov.io/github/awslabs/aws-lambda-powertools-python/branch/develop/graphs/badge.svg)](https://app.codecov.io/gh/awslabs/aws-lambda-powertools-python) ![PythonSupport](https://img.shields.io/static/v1?label=python&message=%203.7|%203.8|%203.9|%203.10&color=blue?style=flat-square&logo=python) ![PyPI version](https://badge.fury.io/py/aws-lambda-powertools.svg) ![PyPi monthly downloads](https://img.shields.io/pypi/dm/aws-lambda-powertools) [![Join our Discord](https://dcbadge.vercel.app/api/server/B8zZKbbyET)](https://discord.gg/B8zZKbbyET) -Powertools is a developer toolkit to implement Serverless [best practices and increase developer velocity](https://awslabs.github.io/aws-lambda-powertools-python/latest/#features). +Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless [best practices and increase developer velocity](https://awslabs.github.io/aws-lambda-powertools-python/latest/#features). > Also available in [Java](https://github.com/awslabs/aws-lambda-powertools-java), [Typescript](https://github.com/awslabs/aws-lambda-powertools-typescript), and [.NET](https://awslabs.github.io/aws-lambda-powertools-dotnet/). 
@@ -45,11 +45,11 @@ With [pip](https://pip.pypa.io/en/latest/index.html) installed, run: ``pip insta * [Serverless E-commerce platform](https://github.com/aws-samples/aws-serverless-ecommerce-platform) * [Serverless GraphQL Nanny Booking Api](https://github.com/trey-rosius/babysitter_api) -## How to support AWS Lambda Powertools for Python? +## How to support Powertools for AWS Lambda (Python)? ### Becoming a reference customer -Knowing which companies are using this library is important to help prioritize the project internally. If your company is using AWS Lambda Powertools for Python, you can request to have your name and logo added to the README file by raising a [Support Lambda Powertools (become a reference)](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E) issue. +Knowing which companies are using this library is important to help prioritize the project internally. If your company is using Powertools for AWS Lambda (Python), you can request to have your name and logo added to the README file by raising a [Support Powertools for AWS Lambda (Python) (become a reference)](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E) issue. The following companies, among others, use Powertools: @@ -65,20 +65,20 @@ The following companies, among others, use Powertools: ### Sharing your work -Share what you did with Powertools 💞💞. Blog post, workshops, presentation, sample apps and others. Check out what the community has already shared about Powertools [here](https://awslabs.github.io/aws-lambda-powertools-python/latest/we_made_this/). +Share what you did with Powertools for AWS Lambda (Python) 💞💞. Blog post, workshops, presentation, sample apps and others. Check out what the community has already shared about Powertools for AWS Lambda (Python) [here](https://awslabs.github.io/aws-lambda-powertools-python/latest/we_made_this/). ### Using Lambda Layer or SAR -This helps us understand who uses Powertools in a non-intrusive way, and helps us gain future investments for other Powertools languages. When [using Layers](https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer), you can add Powertools as a dev dependency (or as part of your virtual env) to not impact the development process. +This helps us understand who uses Powertools for AWS Lambda (Python) in a non-intrusive way, and helps us gain future investments for other Powertools for AWS Lambda languages. When [using Layers](https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer), you can add Powertools for AWS Lambda (Python) as a dev dependency (or as part of your virtual env) to not impact the development process. 
## Credits * Structured logging initial implementation from [aws-lambda-logging](https://gitlab.com/hadrien/aws_lambda_logging) -* Powertools idea [DAZN Powertools](https://github.com/getndazn/dazn-lambda-powertools/) +* Powertools for AWS Lambda (Python) idea [DAZN Powertools](https://github.com/getndazn/dazn-lambda-powertools/) ## Connect -* **AWS Lambda Powertools on Discord**: `#python` - **[Invite link](https://discord.gg/B8zZKbbyET)** +* **Powertools for AWS Lambda on Discord**: `#python` - **[Invite link](https://discord.gg/B8zZKbbyET)** * **Email**: aws-lambda-powertools-feedback@amazon.com ## Security disclosures diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index db80876c798..600a1e726c4 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -54,7 +54,7 @@ def clear_state(self): class LambdaPowertoolsFormatter(BasePowertoolsFormatter): - """AWS Lambda Powertools Logging formatter. + """Powertools for AWS Lambda (Python) Logging formatter. Formats the log message as a JSON encoded string. If the message is a dict it will be used directly. diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 88ba8122da9..201cb942fdd 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -613,7 +613,7 @@ def structure_logs(self, append: bool = False, formatter_options: Optional[Dict] formatter_options = formatter_options or {} # There are 3 operational modes for this method - ## 1. Register a Powertools Formatter for the first time + ## 1. Register a Powertools for AWS Lambda (Python) Formatter for the first time ## 2. Append new keys to the current logger formatter; deprecated in favour of append_keys ## 3. 
Add new keys and discard existing to the registered formatter @@ -624,11 +624,11 @@ def structure_logs(self, append: bool = False, formatter_options: Optional[Dict] formatter = self.logger_formatter or LambdaPowertoolsFormatter(**formatter_options, **log_keys) # type: ignore # noqa: E501 self.registered_handler.setFormatter(formatter) - # when using a custom Lambda Powertools Formatter - # standard and custom keys that are not Powertools Formatter parameters should be appended - # and custom keys that might happen to be Powertools Formatter parameters should be discarded - # this prevents adding them as custom keys, for example, `json_default=<callable>` - # see https://github.com/awslabs/aws-lambda-powertools-python/issues/1263 + # when using a custom Powertools for AWS Lambda (Python) Formatter + # standard and custom keys that are not Powertools for AWS Lambda (Python) Formatter parameters + # should be appended, and custom keys that might happen to be Powertools for AWS Lambda (Python) + # Formatter parameters should be discarded; this prevents adding them as custom keys, for example, + # `json_default=<callable>`; see https://github.com/awslabs/aws-lambda-powertools-python/issues/1263 custom_keys = {k: v for k, v in log_keys.items() if k not in RESERVED_FORMATTER_CUSTOM_KEYS} return self.registered_formatter.append_keys(**custom_keys) @@ -733,7 +733,7 @@ def set_package_logger( Example ------- - **Enables debug logging for AWS Lambda Powertools package** + **Enables debug logging for Powertools for AWS Lambda (Python) package** >>> from aws_lambda_powertools.logging.logger import set_package_logger >>> set_package_logger() diff --git a/aws_lambda_powertools/logging/utils.py b/aws_lambda_powertools/logging/utils.py index 5cd8093073a..6a62a79055f 100644 --- a/aws_lambda_powertools/logging/utils.py +++ b/aws_lambda_powertools/logging/utils.py @@ -17,7 +17,7 @@ def copy_config_to_registered_loggers( Parameters ---------- source_logger : Logger - Powertools Logger to copy configuration from + Powertools for AWS Lambda (Python) Logger to copy configuration from log_level : Union[int, str], optional Logging level to set to registered loggers, by default uses source_logger logging level include : Optional[Set[str]], optional @@ -34,7 +34,7 @@ def copy_config_to_registered_loggers( # 3. Include and exclude set? Add Logger if it’s in include and not in exclude # 4. Only exclude set? Ignore Logger in the excluding list - # Exclude source and powertools package logger by default + # Exclude source and Powertools for AWS Lambda (Python) package logger by default # If source logger is a child ensure we exclude parent logger to not break child logger # from receiving/pushing updates to keys being added/removed source_logger_name = source_logger.name.split(".")[0] diff --git a/aws_lambda_powertools/package_logger.py b/aws_lambda_powertools/package_logger.py index e6c58ba9549..6783d0c3e50 100644 --- a/aws_lambda_powertools/package_logger.py +++ b/aws_lambda_powertools/package_logger.py @@ -5,7 +5,7 @@ def set_package_logger_handler(stream=None): - """Sets up Lambda Powertools package logging. + """Sets up Powertools for AWS Lambda (Python) package logging. By default, we discard any output to not interfere with customers' logging.
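To see what that suppression means in practice, the doctest in the hunk above re-enables package output explicitly. A minimal sketch for local troubleshooting:

```python
# Enable DEBUG-level output for the aws_lambda_powertools package itself,
# e.g. when troubleshooting Logger or Tracer behaviour locally.
from aws_lambda_powertools.logging.logger import set_package_logger

set_package_logger()  # writes package debug logs to stdout by default
```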
diff --git a/aws_lambda_powertools/utilities/validation/exceptions.py b/aws_lambda_powertools/utilities/validation/exceptions.py index 2f13ff64188..8789e3f2e80 100644 --- a/aws_lambda_powertools/utilities/validation/exceptions.py +++ b/aws_lambda_powertools/utilities/validation/exceptions.py @@ -22,7 +22,7 @@ def __init__( Parameters ---------- message : str, optional - Powertools formatted error message + Powertools for AWS Lambda (Python) formatted error message validation_message : str, optional Containing human-readable information on what is wrong (e.g. `data.property[index] must be smaller than or equal to 42`) diff --git a/benchmark/README.md b/benchmark/README.md index 84b48129d94..c6901af28dc 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -1,6 +1,6 @@ # Cold Start Benchmark -The [benchmark.sh script](./benchmark.sh) is a bash script to compare the cold-start time of using the AWS Lambda Powertools in a semi-automated way. It does so by deploying two Lambda functions which both have the aws-lambda-powertools module installed. One Lambda function will import and initialize the three core utilities (`Metrics`, `Logger`, `Tracer`), while the other one will not. +The [benchmark.sh script](./benchmark.sh) is a bash script to compare the cold-start time of using Powertools for AWS Lambda (Python) in a semi-automated way. It does so by deploying two Lambda functions which both have the aws-lambda-powertools module installed. One Lambda function will import and initialize the three core utilities (`Metrics`, `Logger`, `Tracer`), while the other one will not. Please note that this requires the [SAM CLI](https://github.com/aws/aws-sam-cli) version 1.2.0 or later. diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 3dc6401ea8d..4f984ecea6f 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -340,7 +340,7 @@ For convenience, these are the default values when using `CORSConfig` to enable You can use the `Response` class to have full control over the response. For example, you might want to add additional headers, cookies, or set a custom Content-type. ???+ info - Powertools serializes headers and cookies according to the type of input event. + Powertools for AWS Lambda (Python) serializes headers and cookies according to the type of input event. Some event sources require headers and cookies to be encoded as `multiValueHeaders`. ???+ warning "Using multiple values for HTTP headers in ALB?" diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md index 14a638b6123..789bf788004 100644 --- a/docs/core/event_handler/appsync.md +++ b/docs/core/event_handler/appsync.md @@ -156,7 +156,7 @@ Assuming you have [Amplify CLI installed](https://docs.amplify.aws/cli/start/ins ???+ note Amplify CLI generated functions use `Pipenv` as a dependency manager. Your function source code is located at **`amplify/backend/function/your-function-name`**. -Within your function's folder, add Powertools as a dependency with `pipenv install aws-lambda-powertools`. +Within your function's folder, add Powertools for AWS Lambda (Python) as a dependency with `pipenv install aws-lambda-powertools`. Use the following code for `merchantInfo` and `searchMerchant` functions respectively.
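The `merchantInfo` and `searchMerchant` function bodies themselves are not part of this hunk; for orientation, the general shape of an Event Handler resolver for AppSync looks like this (a generic sketch — the type and field names here are illustrative, not the tutorial's code):

```python
from aws_lambda_powertools.event_handler import AppSyncResolver

app = AppSyncResolver()

@app.resolver(type_name="Query", field_name="getTodo")
def get_todo(id: str = ""):
    # Resolve the field; a real resolver would query a data source here.
    return {"id": id, "title": "example"}

def lambda_handler(event, context):
    return app.resolve(event, context)
```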
diff --git a/docs/core/logger.md b/docs/core/logger.md index 2f0472368c3..064328dabb0 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -441,7 +441,7 @@ If you prefer configuring it separately, or you'd want to bring this JSON Format ???+ info When `POWERTOOLS_DEV` env var is present and set to `"true"`, Logger's default serializer (`json.dumps`) will pretty-print log messages for easier readability. -```python hl_lines="2 7-8" title="Pre-configuring Lambda Powertools Formatter" +```python hl_lines="2 7-8" title="Pre-configuring Powertools for AWS Lambda (Python) Formatter" --8<-- "examples/logger/src/powertools_formatter_setup.py" ``` @@ -618,7 +618,7 @@ The `log` argument is the final log record containing [our standard keys](#stand For exceptional cases where you want to completely replace our formatter logic, you can subclass `BasePowertoolsFormatter`. ???+ warning - You will need to implement `append_keys`, `clear_state`, override `format`, and optionally `remove_keys` to keep the same feature set Powertools Logger provides. This also means keeping state of logging keys added. + You will need to implement `append_keys`, `clear_state`, override `format`, and optionally `remove_keys` to keep the same feature set Powertools for AWS Lambda (Python) Logger provides. This also means keeping state of logging keys added. === "bring_your_own_formatter_from_scratch.py" @@ -688,7 +688,7 @@ for the given name and level to the logging module. By default, this logs all bo ---8<-- "examples/logger/src/enabling_boto_logging.py" ``` -### How can I enable Powertools logging for imported libraries? +### How can I enable Powertools for AWS Lambda (Python) logging for imported libraries? You can copy the Logger setup to all or sub-sets of registered external loggers. Use the `copy_config_to_registered_logger` method to do this. @@ -703,7 +703,7 @@ By default all registered loggers will be modified. You can change this behavior ### How can I add standard library logging attributes to a log record? -The Python standard library log records contains a [large set of attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes){target="_blank"}, however only a few are included in Powertools Logger log record by default. +The Python standard library log records contain a [large set of attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes){target="_blank"}; however, only a few are included in the Powertools for AWS Lambda (Python) Logger log record by default. You can include any of these logging attributes as key value arguments (`kwargs`) when instantiating `Logger` or `LambdaPowertoolsFormatter`. @@ -721,7 +721,7 @@ You can also add them later anywhere in your code with `append_keys`, or remove ---8<-- "examples/logger/src/append_and_remove_keys_output.json" ``` -For log records originating from Powertools Logger, the `name` attribute will be the same as `service`, for log records coming from standard library logger, it will be the name of the logger (i.e. what was used as name argument to `logging.getLogger`). +For log records originating from the Powertools for AWS Lambda (Python) Logger, the `name` attribute will be the same as `service`; for log records coming from the standard library logger, it will be the name of the logger (i.e. what was used as the name argument to `logging.getLogger`). ### What's the difference between `append_keys` and `extra`? @@ -741,6 +741,6 @@ Here's an example where we persist `payment_id` not `request_id`.
Note that `pay ---8<-- "examples/logger/src/append_keys_vs_extra_output.json" ``` -### How do I aggregate and search Powertools logs across accounts? +### How do I aggregate and search Powertools for AWS Lambda (Python) logs across accounts? As of now, ElasticSearch (ELK) or 3rd party solutions are best suited to this task. Please refer to this [discussion for more details](https://github.com/awslabs/aws-lambda-powertools-python/issues/460) diff --git a/docs/core/tracer.md b/docs/core/tracer.md index ae4af4e9cd5..0b701928d10 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -23,7 +23,7 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. ### Install -!!! info "This is not necessary if you're installing Powertools via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" +!!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" Add `aws-lambda-powertools[tracer]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will ensure you have the required dependencies before using Tracer. diff --git a/docs/index.md b/docs/index.md index 2d64d574215..1a475cf0e02 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,14 +1,14 @@ --- title: Homepage -description: AWS Lambda Powertools for Python +description: Powertools for AWS Lambda (Python) --- <!-- markdownlint-disable MD043 MD013 --> -Powertools is a developer toolkit to implement Serverless best practices and increase developer velocity. +Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity. ???+ tip - Powertools is also available for [Java](https://awslabs.github.io/aws-lambda-powertools-java/){target="_blank"}, [TypeScript](https://awslabs.github.io/aws-lambda-powertools-typescript/latest/){target="_blank"}, and [.NET](https://awslabs.github.io/aws-lambda-powertools-dotnet/){target="_blank"} + Powertools for AWS Lambda (Python) is also available for [Java](https://awslabs.github.io/aws-lambda-powertools-java/){target="_blank"}, [TypeScript](https://awslabs.github.io/aws-lambda-powertools-typescript/latest/){target="_blank"}, and [.NET](https://awslabs.github.io/aws-lambda-powertools-dotnet/){target="_blank"} ??? hint "Support this project by becoming a reference customer, sharing your work, or using Layers/SAR :heart:" @@ -18,13 +18,13 @@ Powertools is a developer toolkit to implement Serverless best practices and inc 2) [**Share your work**](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=community-content&template=share_your_work.yml&title=%5BI+Made+This%5D%3A+%3CTITLE%3E). Blog posts, video, sample projects you used Powertools! - 3) Use [**Lambda Layers**](#lambda-layer) or [**SAR**](#sar), if possible. This helps us understand who uses Powertools in a non-intrusive way, and helps us gain future investments for other Powertools languages. + 3) Use [**Lambda Layers**](#lambda-layer) or [**SAR**](#sar), if possible. This helps us understand who uses Powertools for AWS Lambda (Python) in a non-intrusive way, and helps us gain future investments for other Powertools for AWS Lambda languages. - When using Layers, you can add Powertools as a dev dependency (or as part of your virtual env) to not impact the development process. 
+ When using Layers, you can add Powertools for AWS Lambda (Python) as a dev dependency (or as part of your virtual env) to not impact the development process. ## Install -You can install Powertools using one of the following options: +You can install Powertools for AWS Lambda (Python) using one of the following options: * **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32**](#){: .copyMe}:clipboard: * **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32**](#){: .copyMe}:clipboard: @@ -42,16 +42,16 @@ You can install Powertools using one of the following options: ### Local development -!!! info "Using Powertools via Lambda Layer? Simply add [**`"aws-lambda-powertools[all]"`**](#){: .copyMe}:clipboard: as a development dependency." +!!! info "Using Powertools for AWS Lambda (Python) via Lambda Layer? Simply add [**`"aws-lambda-powertools[all]"`**](#){: .copyMe}:clipboard: as a development dependency." -Powertools relies on the [AWS SDK bundled in the Lambda runtime](https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html){target="_blank"}. This helps us achieve an optimal package size and initialization. However, when developing locally, you need to install AWS SDK as a development dependency (not as a production dependency): +Powertools for AWS Lambda (Python) relies on the [AWS SDK bundled in the Lambda runtime](https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html){target="_blank"}. This helps us achieve an optimal package size and initialization. However, when developing locally, you need to install the AWS SDK as a development dependency (not as a production dependency): * **Pip**: [**`pip install "aws-lambda-powertools[aws-sdk]"`**](#){: .copyMe}:clipboard: * **Poetry**: [**`poetry add "aws-lambda-powertools[aws-sdk]" --group dev`**](#){: .copyMe}:clipboard: * **Pipenv**: [**`pipenv install --dev "aws-lambda-powertools[aws-sdk]"`**](#){: .copyMe}:clipboard: ??? question "Why is that necessary?" - Powertools relies on the AWS SDK being available to use in the target runtime (AWS Lambda). + Powertools for AWS Lambda (Python) relies on the AWS SDK being available to use in the target runtime (AWS Lambda). As a result, it affects your favorite IDE in terms of code auto-completion, or running your test suite locally with no Lambda emulation such as [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html){target="_blank"}. @@ -60,7 +60,7 @@ Powertools relies on the [AWS SDK bundled in the Lambda runtime](https://docs.aw In this context, `[aws-sdk]` is an alias to the `boto3` package.
Due to dependency resolution, it'll either install: * **(A)** the SDK version available in [Lambda runtime](https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html){target="_blank"} -* **(B)** a more up-to-date version if another package you use also depends on `boto3`, for example [Powertools Tracer](core/tracer.md){target="_blank"} +* **(B)** a more up-to-date version if another package you use also depends on `boto3`, for example [Powertools for AWS Lambda (Python) Tracer](core/tracer.md){target="_blank"} ### Lambda Layer @@ -70,7 +70,7 @@ In this context, `[aws-sdk]` is an alias to the `boto3` package. Due to dependen For our Layers, we compile and optimize [all dependencies](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/pyproject.toml#L98){target="_blank"}, and [remove duplicate dependencies already available in the Lambda runtime](https://github.com/awslabs/cdk-aws-lambda-powertools-layer/blob/main/layer/Python/Dockerfile#L36){target="_blank"} to achieve the most optimal size. -You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs.aws.amazon.com/lambda/latest/dg/invocation-layers.html#invocation-layers-using){target="_blank"}, or your preferred deployment framework. +You can include the Powertools for AWS Lambda (Python) Lambda Layer using the [AWS Lambda Console](https://docs.aws.amazon.com/lambda/latest/dg/invocation-layers.html#invocation-layers-using){target="_blank"}, or your preferred deployment framework. ??? note "Note: Click to expand and copy any regional Lambda Layer ARN" @@ -580,14 +580,14 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch variable "aws_powertools_version" { type = string default = "2.0.0" - description = "The AWS Powertools release version" + description = "The Powertools for AWS Lambda (Python) release version" } output "deployed_powertools_sar_version" { value = data.aws_serverlessapplicationrepository_application.sar_app.semantic_version } - # Fetch Powertools Layer ARN from deployed SAR App + # Fetch Powertools for AWS Lambda (Python) Layer ARN from deployed SAR App output "aws_lambda_powertools_layer_arn" { value = aws_serverlessapplicationrepository_cloudformation_stack.deploy_sar_stack.outputs.LayerVersionArn } @@ -635,7 +635,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch - serverlessrepo:CreateCloudFormationTemplate - serverlessrepo:GetCloudFormationTemplate Resource: - # this is arn of the powertools SAR app + # this is the ARN of the Powertools for AWS Lambda (Python) SAR app - arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - Sid: S3AccessLayer Effect: Allow @@ -643,7 +643,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch - s3:GetObject Resource: # AWS publishes to an external S3 bucket locked down to your account ID - # The below example is us publishing lambda powertools + # The below example is us publishing Powertools for AWS Lambda (Python) # Bucket: awsserverlessrepo-changesets-plntc6bfnfj # Key: *****/arn:aws:serverlessrepo:eu-west-1:057560766410:applications-aws-lambda-powertools-python-layer-versions-1.10.2/aeeccf50-****-****-****-********* - arn:aws:s3:::awsserverlessrepo-changesets-*/* @@ -675,7 +675,7 @@ sam init --app-template hello-world-powertools-python --name sam-app --package-t ## Features -Core utilities such as Tracing, Logging, Metrics, and Event Handler will be available across all Powertools languages. Additional utilities are subjective to each language ecosystem and customer demand. +Core utilities such as Tracing, Logging, Metrics, and Event Handler will be available across all Powertools for AWS Lambda languages. Additional utilities are subject to each language ecosystem and customer demand.
| Utility | Description | | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------- | @@ -737,11 +737,11 @@ As a best practice for libraries, Powertools module logging statements are suppr When necessary, you can use `POWERTOOLS_DEBUG` environment variable to enable debugging. This will provide additional information on every internal operation. -## How to support AWS Lambda Powertools for Python? +## How to support Powertools for AWS Lambda (Python)? ### Becoming a reference customer -Knowing which companies are using this library is important to help prioritize the project internally. If your company is using AWS Lambda Powertools for Python, you can request to have your name and logo added to the README file by raising a [Support Lambda Powertools (become a reference)](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E){target="_blank"} issue. +Knowing which companies are using this library is important to help prioritize the project internally. If your company is using Powertools for AWS Lambda (Python), you can request to have your name and logo added to the README file by raising a [Support Powertools for AWS Lambda (Python) (become a reference)](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E){target="_blank"} issue. The following companies, among others, use Powertools: @@ -757,11 +757,11 @@ The following companies, among others, use Powertools: ### Sharing your work -Share what you did with Powertools 💞💞. Blog post, workshops, presentation, sample apps and others. Check out what the community has already shared about Powertools [here](https://awslabs.github.io/aws-lambda-powertools-python/latest/we_made_this/). +Share what you did with Powertools for AWS Lambda (Python) 💞💞. Blog post, workshops, presentation, sample apps and others. Check out what the community has already shared about Powertools for AWS Lambda (Python) [here](https://awslabs.github.io/aws-lambda-powertools-python/latest/we_made_this/). ### Using Lambda Layer or SAR -This helps us understand who uses Powertools in a non-intrusive way, and helps us gain future investments for other Powertools languages. When [using Layers](https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer), you can add Powertools as a dev dependency (or as part of your virtual env) to not impact the development process. +This helps us understand who uses Powertools for AWS Lambda (Python) in a non-intrusive way, and helps us gain future investments for other Powertools for AWS Lambda languages. When [using Layers](https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer), you can add Powertools for AWS Lambda (Python) as a dev dependency (or as part of your virtual env) to not impact the development process. 
## Tenets diff --git a/docs/roadmap.md b/docs/roadmap.md index 084028672ac..bd082bc8ef7 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -93,7 +93,7 @@ Our end-to-end mechanism follows four major steps: ## Disclaimer -The AWS Lambda Powertools team values feedback and guidance from its community of users, although final decisions on inclusion into the project will be made by AWS. +The Powertools for AWS Lambda (Python) team values feedback and guidance from its community of users, although final decisions on inclusion into the project will be made by AWS. We determine the high-level direction for our open roadmap based on customer feedback and popularity (👍🏽 and comments), security and operational impacts, and business value. Where features don’t meet our goals and longer-term strategy, we will communicate that clearly and openly as quickly as possible with an explanation of why the decision was made. diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md index 9965d70d267..29ec6ebadd2 100644 --- a/docs/tutorial/index.md +++ b/docs/tutorial/index.md @@ -1,11 +1,11 @@ --- title: Tutorial -description: Powertools introduction +description: Powertools for AWS Lambda (Python) introduction --- <!-- markdownlint-disable MD043 MD041 --> -This tutorial progressively introduces Lambda Powertools core utilities by using one feature at a time. +This tutorial progressively introduces Powertools for AWS Lambda (Python) core utilities by using one feature at a time. ## Requirements @@ -323,7 +323,7 @@ We can massively simplify cross-cutting concerns while keeping it lightweight by ???+ tip This is available for both [REST API (API Gateway, ALB)](../core/event_handler/api_gateway.md){target="_blank"} and [GraphQL API (AppSync)](../core/event_handler/appsync.md){target="_blank"}. -Let's include Lambda Powertools as a dependency in `requirement.txt`, and use Event Handler to refactor our previous example. +Let's include Powertools for AWS Lambda (Python) as a dependency in `requirements.txt`, and use Event Handler to refactor our previous example. === "app.py" @@ -452,11 +452,11 @@ We could start by creating a dictionary with Lambda context information or somet ### Simplifying with Logger ???+ question "Surely this could be easier, right?" - Yes! Powertools for AWS Lambda (Python) Logger to the rescue :-) -As we already have Lambda Powertools as a dependency, we can simply import [Logger](../core/logger.md){target="_blank"}. +As we already have Powertools for AWS Lambda (Python) as a dependency, we can simply import [Logger](../core/logger.md){target="_blank"}. -```python title="Refactoring with Lambda Powertools Logger" hl_lines="1 3 5 12 18 22" +```python title="Refactoring with Powertools for AWS Lambda (Python) Logger" hl_lines="1 3 5 12 18 22" from aws_lambda_powertools import Logger from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths @@ -485,7 +485,7 @@ def lambda_handler(event, context): Let's break this down: -* **L5**: We add Lambda Powertools Logger; the boilerplate is now done for you. By default, we set `INFO` as the logging level if `LOG_LEVEL` env var isn't set. +* **L5**: We add Powertools for AWS Lambda (Python) Logger; the boilerplate is now done for you. By default, we set `INFO` as the logging level if `LOG_LEVEL` env var isn't set. * **L22**: We use `logger.inject_lambda_context` decorator to inject key information from Lambda context into every log.
* **L22**: We also instruct Logger to use the incoming API Gateway Request ID as a [correlation id](../core/logger.md#set_correlation_id-method) automatically. * **L22**: Since we're in dev, we also use `log_event=True` to automatically log each incoming request for debugging. This can also be set via [environment variables](./index.md#environment-variables){target="_blank"}. @@ -535,7 +535,7 @@ It’s a [two-step process](https://docs.aws.amazon.com/lambda/latest/dg/services- 1. Enable tracing in your Lambda function. 2. Instrument your application code. -Let's explore how we can instrument our code with [AWS X-Ray SDK](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/index.html){target="_blank"}, and then simplify it with [Lambda Powertools Tracer](../core/tracer.md){target="_blank"} feature. +Let's explore how we can instrument our code with [AWS X-Ray SDK](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/index.html){target="_blank"}, and then simplify it with [Powertools for AWS Lambda (Python) Tracer](../core/tracer.md){target="_blank"} feature. === "app.py" @@ -725,12 +725,12 @@ If you choose any of the traces available, try opening the `handler` subsegment Cross-cutting concerns like filtering traces by Cold Start, including response as well as exceptions as tracing metadata can take a considerable amount of boilerplate. -We can simplify our previous patterns by using [Lambda Powertools Tracer](../core/tracer.md){target="_blank"}; a thin wrapper on top of X-Ray SDK. +We can simplify our previous patterns by using [Powertools for AWS Lambda (Python) Tracer](../core/tracer.md){target="_blank"}; a thin wrapper on top of X-Ray SDK. ???+ note You can now safely remove `aws-xray-sdk` from `requirements.txt`; keep `aws-lambda-powertools` only. -```python title="Refactoring with Lambda Powertools Tracer" hl_lines="1 6 11 13 19 21 27" +```python title="Refactoring with Powertools for AWS Lambda (Python) Tracer" hl_lines="1 6 11 13 19 21 27" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths @@ -771,14 +771,14 @@ Decorators, annotations and metadata are largely the same, except we now have a Another subtle difference is that you can now run your Lambda functions and unit test them locally without having to explicitly disable Tracer. -Lambda Powertools optimizes for Lambda compute environment. As such, we add these and other common approaches to accelerate your development, so you don't worry about implementing every cross-cutting concern. +Powertools for AWS Lambda (Python) optimizes for the Lambda compute environment. As such, we add these and other common approaches to accelerate your development, so you don't worry about implementing every cross-cutting concern. ???+ tip You can [opt out of some of these behaviours](../core/tracer/#advanced){target="_blank"} like disabling response capturing, explicitly patching only X modules, etc. Repeat the process of building, deploying, and invoking your application via the API endpoint.
Within the [AWS X-Ray Console](https://console.aws.amazon.com/xray/home#/traces/){target="_blank"}, you should see a similar view: -![AWS X-Ray Console trace view using Lambda Powertools Tracer](../media/tracer_utility_showcase_2.png) +![AWS X-Ray Console trace view using Powertools for AWS Lambda (Python) Tracer](../media/tracer_utility_showcase_2.png) ???+ tip Consider using [Amazon CloudWatch ServiceLens view](https://console.aws.amazon.com/cloudwatch/home#servicelens:service-map/map){target="_blank"} as it aggregates AWS X-Ray traces and CloudWatch metrics and logs in one view. @@ -801,7 +801,7 @@ By default, AWS Lambda adds [invocation and performance metrics](https://docs.aw ???+ tip You can [optionally enable detailed metrics](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-metrics-and-dimensions.html#api-gateway-metricdimensions){target="_blank"} per each API route, stage, and method in API Gateway. -Let's expand our application with custom metrics using AWS SDK to see how it works, then let's upgrade it with Lambda Powertools :-) +Let's expand our application with custom metrics using AWS SDK to see how it works, then let's upgrade it with Powertools for AWS Lambda (Python) :-) === "app.py" @@ -936,7 +936,7 @@ Within `template.yaml`, we add [CloudWatchPutMetricPolicy](https://docs.aws.amaz ### Simplifying with Metrics -[Lambda Powertools Metrics](../core/metrics.md){target="_blank} uses [Amazon CloudWatch Embedded Metric Format (EMF)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format.html) to create custom metrics **asynchronously** via a native integration with Lambda. +[Powertools for AWS Lambda (Python) Metrics](../core/metrics.md){target="_blank"} uses [Amazon CloudWatch Embedded Metric Format (EMF)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format.html) to create custom metrics **asynchronously** via a native integration with Lambda. In general terms, EMF is a specification that expects metrics in a JSON payload within CloudWatch Logs. Lambda ingests all logs emitted by a given function into CloudWatch Logs. CloudWatch automatically looks for log entries that follow the EMF format and transforms them into a CloudWatch metric. @@ -945,7 +945,7 @@ In general terms, EMF is a specification that expects metrics in a JSON payload Let's implement that using [Metrics](../core/metrics.md){target="_blank"}: -```python title="Refactoring with Lambda Powertools Metrics" hl_lines="1 4 9 18 27 33" +```python title="Refactoring with Powertools for AWS Lambda (Python) Metrics" hl_lines="1 4 9 18 27 33" from aws_lambda_powertools import Logger, Tracer, Metrics from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths @@ -1034,7 +1034,7 @@ If you're curious about how the EMF portion of your function logs look like, you ## Final considerations -We covered a lot of ground here and we only scratched the surface of the feature set available within Lambda Powertools. +We covered a lot of ground here and we only scratched the surface of the feature set available within Powertools for AWS Lambda (Python). When it comes to the observability features ([Tracer](../core/tracer.md){target="_blank"}, [Metrics](../core/metrics.md){target="_blank"}, [Logging](../core/logger.md){target="_blank"}), don't stop there!
The goal here is to ensure you can ask arbitrary questions to assess your system's health; these features are only part of the wider story! @@ -1043,7 +1043,7 @@ This requires a change in mindset to ensure operational excellence is part of th ???+ tip You can find more details on other leading practices described in the [Well-Architected Serverless Lens](https://aws.amazon.com/blogs/aws/new-serverless-lens-in-aws-well-architected-tool/). - Lambda Powertools is largely designed to make some of these practices easier to adopt from day 1. + Powertools for AWS Lambda (Python) is largely designed to make some of these practices easier to adopt from day 1. ???+ question "Have ideas for other tutorials?" You can open up a [documentation issue](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=documentation&template=documentation-improvements.md&title=Tutorial%20Suggestion){target="_blank"}, or via e-mail [aws-lambda-powertools-feedback@amazon.com](mailto:aws-lambda-powertools-feedback@amazon.com). diff --git a/docs/upgrade.md b/docs/upgrade.md index 948ce6fc873..097c2d35b2a 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -1,15 +1,15 @@ --- title: Upgrade guide -description: Guide to update between major Powertools versions +description: Guide to update between major Powertools for AWS Lambda (Python) versions --- <!-- markdownlint-disable MD043 --> ## End of support v1 -!!! warning "On March 31st, 2023, AWS Lambda Powertools for Python v1 reached end of support and will no longer receive updates or releases. If you are still using v1, we strongly recommend you to read our upgrade guide and update to the latest version." +!!! warning "On March 31st, 2023, Powertools for AWS Lambda (Python) v1 reached end of support and will no longer receive updates or releases. If you are still using v1, we strongly recommend you read our upgrade guide and update to the latest version." -Given our commitment to all of our customers using AWS Lambda Powertools for Python, we will keep [Pypi](https://pypi.org/project/aws-lambda-powertools/) v1 releases and documentation 1.x versions to prevent any disruption. +Given our commitment to all of our customers using Powertools for AWS Lambda (Python), we will keep [PyPI](https://pypi.org/project/aws-lambda-powertools/) v1 releases and documentation 1.x versions to prevent any disruption. ## Migrate to v2 from v1 diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index efe41c2f82f..79657c436f6 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -257,7 +257,7 @@ You can also have features enabled only at specific days, for example: enable ch ???+ info "How should I use timezones?" You can use any [IANA time zone](https://www.iana.org/time-zones){target="_blank"} (as originally specified in [PEP 615](https://peps.python.org/pep-0615/){target="_blank"}) as part of your rules definition. - Powertools takes care of converting and calculate the correct timestamps for you. + Powertools for AWS Lambda (Python) takes care of converting and calculating the correct timestamps for you. When using `SCHEDULE_BETWEEN_DATETIME_RANGE`, use timestamps without timezone information, and specify the timezone manually. This way, you'll avoid hitting problems with daylight saving time. @@ -453,7 +453,7 @@ These are the available options for further customization.
| **max_age** | `5` | Number of seconds to cache feature flags configuration fetched from AWS AppConfig | | **sdk_config** | `None` | [Botocore Config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html){target="_blank"} | | **jmespath_options** | `None` | For advanced use cases when you want to bring your own [JMESPath functions](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} | -| **logger** | `logging.Logger` | Logger to use for debug. You can optionally supply an instance of Powertools Logger. | +| **logger** | `logging.Logger` | Logger to use for debug. You can optionally supply an instance of Powertools for AWS Lambda (Python) Logger. | === "appconfig_provider_options.py" diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 81afa8b0117..f43dc68487f 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -410,7 +410,7 @@ Imagine the function executes successfully, but the client never receives the re ???+ note This is automatically done when you decorate your Lambda handler with [@idempotent decorator](#idempotent-decorator). -To prevent against extended failed retries when a [Lambda function times out](https://aws.amazon.com/premiumsupport/knowledge-center/lambda-verify-invocation-timeouts/), Powertools calculates and includes the remaining invocation available time as part of the idempotency record. +To protect against extended failed retries when a [Lambda function times out](https://aws.amazon.com/premiumsupport/knowledge-center/lambda-verify-invocation-timeouts/), Powertools for AWS Lambda (Python) calculates and includes the remaining invocation available time as part of the idempotency record. ???+ example If a second invocation happens **after** this timestamp, and the record is marked as `INPROGRESS`, we will execute the invocation again as if it was in the `EXPIRED` state (e.g., `expire_seconds` field elapsed). @@ -1156,7 +1156,7 @@ def lambda_handler(event, context): return {"message": event['message'], "statusCode": 200} ``` -???+ tip "Tip: JMESPath Powertools functions are also available" +???+ tip "Tip: JMESPath Powertools for AWS Lambda (Python) functions are also available" Built-in functions known in the validation utility like `powertools_json`, `powertools_base64`, `powertools_base64_gzip` are also available to use in this utility. ## Testing your code diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md index e86fb824faf..5550cdc507e 100644 --- a/docs/utilities/jmespath_functions.md +++ b/docs/utilities/jmespath_functions.md @@ -6,7 +6,7 @@ description: Utility <!-- markdownlint-disable MD043 --> ???+ tip - JMESPath is a query language for JSON used by AWS CLI, AWS Python SDK, and AWS Lambda Powertools for Python. + JMESPath is a query language for JSON used by AWS CLI, AWS Python SDK, and Powertools for AWS Lambda (Python). Built-in [JMESPath](https://jmespath.org/){target="_blank"} Functions to easily deserialize common encoded JSON payloads in Lambda functions. @@ -23,7 +23,7 @@ Built-in [JMESPath](https://jmespath.org/){target="_blank"} Functions to easily You might have events that contain encoded JSON payloads as strings, base64, or even in compressed format. It is a common use case to decode and extract them partially or fully as part of your Lambda function invocation.
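As a concrete illustration of that decode-and-extract flow, the built-in `powertools_json` function can unwrap a JSON-encoded string body in a single envelope expression (a minimal sketch — the event shape and field name are illustrative):

```python
from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope

def lambda_handler(event: dict, context) -> dict:
    # Deserialize the JSON string under "body" and return the inner object.
    payload = extract_data_from_envelope(data=event, envelope="powertools_json(body)")
    return {"customer_id": payload.get("customerId")}  # hypothetical field name
```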
-Powertools also have utilities like [validation](validation.md), [idempotency](idempotency.md), or [feature flags](feature_flags.md) where you might need to extract a portion of your data before using them. +Powertools for AWS Lambda (Python) also has utilities like [validation](validation.md), [idempotency](idempotency.md), or [feature flags](feature_flags.md) where you might need to extract a portion of your data before using them. ???+ info "Terminology" **Envelope** is the terminology we use for the **JMESPath expression** to extract your JSON object from your data input. We might use those two terms interchangeably. @@ -90,7 +90,7 @@ These are all built-in envelopes you can use along with their expression as a re You can use our built-in JMESPath functions within your envelope expression. They handle deserialization for common data formats found in AWS Lambda event sources such as JSON strings, base64, and gzip-compressed data. ???+ info - We use these everywhere in Powertools to easily decode and unwrap events from Amazon API Gateway, Amazon Kinesis, AWS CloudWatch Logs, etc. + We use these everywhere in Powertools for AWS Lambda (Python) to easily decode and unwrap events from Amazon API Gateway, Amazon Kinesis, AWS CloudWatch Logs, etc. #### powertools_json function @@ -187,7 +187,7 @@ This sample will decompress and decode base64 data from Cloudwatch Logs, then us ???+ warning This should only be used for advanced use cases where you have special formats not covered by the built-in functions. -For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} and any additional option via `jmespath_options` param. To keep Powertools built-in functions, you can subclass from `PowertoolsFunctions`. +For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} and any additional option via `jmespath_options` param. To keep the Powertools for AWS Lambda (Python) built-in functions, you can subclass from `PowertoolsFunctions`. Here is an example of how to decompress messages using [snappy](https://github.com/andrix/python-snappy){target="_blank"}: diff --git a/docs/utilities/middleware_factory.md b/docs/utilities/middleware_factory.md index 4d125b3c006..1552311ea17 100644 --- a/docs/utilities/middleware_factory.md +++ b/docs/utilities/middleware_factory.md @@ -109,9 +109,9 @@ When executed, your middleware name will [appear in AWS X-Ray Trace details as]( ![Middleware simple Tracer](../media/middleware_factory_tracer_1.png) -### Combining Powertools utilities +### Combining Powertools for AWS Lambda (Python) utilities -You can create your own middleware and combine many features of Lambda Powertools such as [trace](../core/logger.md), [logs](../core/logger.md), [feature flags](feature_flags.md), [validation](validation.md), [jmespath_functions](jmespath_functions.md) and others to abstract non-functional code. +You can create your own middleware and combine many features of Powertools for AWS Lambda (Python) such as [trace](../core/tracer.md), [logs](../core/logger.md), [feature flags](feature_flags.md), [validation](validation.md), [jmespath_functions](jmespath_functions.md) and others to abstract non-functional code.
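For orientation before the document's own richer example: the bare skeleton of such a middleware uses the public `lambda_handler_decorator` factory, with the hook comments marking where cross-cutting logic goes (a minimal sketch):

```python
from aws_lambda_powertools.middleware_factory import lambda_handler_decorator

@lambda_handler_decorator
def middleware_before_after(handler, event, context):
    # before: e.g. validate or enrich the incoming event here
    response = handler(event, context)
    # after: e.g. sanitize or annotate the response here
    return response

@middleware_before_after
def lambda_handler(event, context):
    return {"statusCode": 200}
```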
In the example below, we create a Middleware with the following features: diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 6607e7b07b0..5ff419f8777 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -16,7 +16,7 @@ This utility provides data parsing and deep validation using [Pydantic](https:// ### Install -!!! info "This is not necessary if you're installing Powertools via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" +!!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will ensure you have the required dependencies before using Parser. diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md index 277a1f91f81..cef3b1e3134 100644 --- a/docs/utilities/validation.md +++ b/docs/utilities/validation.md @@ -32,7 +32,7 @@ You can also use the standalone `validate` function, if you want more control ov ### Install -!!! info "This is not necessary if you're installing Powertools via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" +!!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" Add `aws-lambda-powertools[validation]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will ensure you have the required dependencies before using Validation. diff --git a/docs/we_made_this.md b/docs/we_made_this.md index a9022b68e5d..f160b30e39c 100644 --- a/docs/we_made_this.md +++ b/docs/we_made_this.md @@ -1,11 +1,11 @@ --- title: We Made This (Community) -description: Blog posts, tutorials, and videos about AWS Lambda Powertools created by the Powertools Community. +description: Blog posts, tutorials, and videos about Powertools for AWS Lambda (Python) created by the Powertools for AWS Lambda (Python) Community. --- <!-- markdownlint-disable MD001 MD043 --> -This space is dedicated to highlight our awesome community content featuring Powertools 🙏! +This space is dedicated to highlighting our awesome community content featuring Powertools for AWS Lambda (Python) 🙏! !!! info "[Get your content featured here](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=community-content&template=share_your_work.yml&title=%5BI+Made+This%5D%3A+%3CTITLE%3E){target="_blank"}!" @@ -13,7 +13,7 @@ This space is dedicated to highlight our awesome community content featuring Pow [![Join our Discord](https://dcbadge.vercel.app/api/server/B8zZKbbyET)](https://discord.gg/B8zZKbbyET){target="_blank"} -Join us on [Discord](https://discord.gg/B8zZKbbyET){target="_blank"} to connect with the Powertools community 👋. Ask questions, learn from each other, contribute, hang out with key contributors, and more! +Join us on [Discord](https://discord.gg/B8zZKbbyET){target="_blank"} to connect with the Powertools for AWS Lambda (Python) community 👋. Ask questions, learn from each other, contribute, hang out with key contributors, and more!
## Blog posts diff --git a/examples/event_handler_graphql/sam/template.yaml b/examples/event_handler_graphql/sam/template.yaml index 7639114802c..b676d957b9d 100644 --- a/examples/event_handler_graphql/sam/template.yaml +++ b/examples/event_handler_graphql/sam/template.yaml @@ -9,7 +9,7 @@ Globals: Tracing: Active Environment: Variables: - # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables + # Powertools for AWS Lambda (Python) env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables LOG_LEVEL: INFO POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 POWERTOOLS_LOGGER_LOG_EVENT: true diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml index c45f6cd5e3b..7e4bde3512d 100644 --- a/examples/logger/sam/template.yaml +++ b/examples/logger/sam/template.yaml @@ -1,6 +1,6 @@ AWSTemplateFormatVersion: "2010-09-09" Transform: AWS::Serverless-2016-10-31 -Description: AWS Lambda Powertools Tracer doc examples +Description: Powertools for AWS Lambda (Python) Logger doc examples Globals: Function: diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml index 9f2784680e5..f300066b752 100644 --- a/examples/metrics/sam/template.yaml +++ b/examples/metrics/sam/template.yaml @@ -1,6 +1,6 @@ AWSTemplateFormatVersion: "2010-09-09" Transform: AWS::Serverless-2016-10-31 -Description: AWS Lambda Powertools Metrics doc examples +Description: Powertools for AWS Lambda (Python) Metrics doc examples Globals: Function: diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml index 788c007cd86..f0da401d516 100644 --- a/examples/tracer/sam/template.yaml +++ b/examples/tracer/sam/template.yaml @@ -1,6 +1,6 @@ AWSTemplateFormatVersion: "2010-09-09" Transform: AWS::Serverless-2016-10-31 -Description: AWS Lambda Powertools Tracer doc examples +Description: Powertools for AWS Lambda (Python) Tracer doc examples Globals: Function: diff --git a/layer/README.md b/layer/README.md index 99da0083ffc..3d43a66a7f1 100644 --- a/layer/README.md +++ b/layer/README.md @@ -1,11 +1,11 @@ <!-- markdownlint-disable MD041 MD043--> -# CDK Powertools layer +# CDK Powertools for AWS Lambda (Python) layer -This is a CDK project to build and deploy AWS Lambda Powertools [Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-concepts.html#gettingstarted-concepts-layer) to multiple commercial regions. +This is a CDK project to build and deploy Powertools for AWS Lambda (Python) [Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-concepts.html#gettingstarted-concepts-layer) to multiple commercial regions. ## Build the layer -To build the layer construct you need to provide the Powertools version that is [available in PyPi](https://pypi.org/project/aws-lambda-powertools/). +To build the layer construct, you need to provide the Powertools for AWS Lambda (Python) version that is [available in PyPI](https://pypi.org/project/aws-lambda-powertools/). You can pass it as a context variable when running `synth` or `deploy`, ```shell @@ -22,6 +22,6 @@ The layer stack writes the layer ARN after the deployment as SSM parameter and t ## Version tracking AWS Lambda versions Lambda layers by incrementing a number at the end of the ARN. -This makes it challenging to know which Powertools version a layer contains. -For better tracking of the ARNs and the corresponding version we need to keep track which powertools version was deployed to which layer. -To achieve that we created two components.
First, we created a version tracking app which receives events via EventBridge. Second, after a successful canary deployment we send the layer ARN, Powertools version, and the region to this EventBridge. +This makes it challenging to know which Powertools for AWS Lambda (Python) version a layer contains. +For better tracking of the ARNs and the corresponding version we need to keep track of which Powertools for AWS Lambda (Python) version was deployed to which layer. +To achieve that we created two components. First, we created a version tracking app which receives events via EventBridge. Second, after a successful canary deployment we send the layer ARN, Powertools for AWS Lambda (Python) version, and the region to this EventBridge. diff --git a/layer/app.py b/layer/app.py index f9d0f778df0..7bc5d8b0103 100644 --- a/layer/app.py +++ b/layer/app.py @@ -13,7 +13,7 @@ if not POWERTOOLS_VERSION: raise ValueError( - "Please set the version for Powertools by passing the '--context version=<version>' parameter to the CDK " + "Please set the version for Powertools for AWS Lambda (Python) by passing the '--context version=<version>' parameter to the CDK " "synth step." ) diff --git a/layer/layer/canary/app.py b/layer/layer/canary/app.py index e9d8d5d7679..9dea0297690 100644 --- a/layer/layer/canary/app.py +++ b/layer/layer/canary/app.py @@ -81,17 +81,17 @@ def check_envs(): def verify_powertools_version() -> None: """ - fetches the version that we import from the powertools layer and compares + fetches the version that we import from the Powertools for AWS Lambda (Python) layer and compares it with the expected version set in the environment variable, which we pass during deployment. :raise ValueError if the expected version is not the same as the version we get from the layer """ - logger.info("Checking Powertools version in library...") + logger.info("Checking Powertools for AWS Lambda (Python) version in library...") current_version = version("aws_lambda_powertools") if powertools_version != current_version: raise ValueError( - f'Expected Powertools version is "{powertools_version}", but layer contains version "{current_version}"' + f'Expected Powertools for AWS Lambda (Python) version is "{powertools_version}", but layer contains version "{current_version}"' ) - logger.info(f"Current Powertools version is: {current_version} [{_get_architecture()}]") + logger.info(f"Current Powertools for AWS Lambda (Python) version is: {current_version} [{_get_architecture()}]") def send_notification(): diff --git a/layer/pyproject.toml b/layer/pyproject.toml index 5be2628e825..f3ea621cf0f 100644 --- a/layer/pyproject.toml +++ b/layer/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "aws-lambda-powertools-python-layer" version = "1.1.0" -description = "AWS Lambda Powertools for Python Lambda Layers" +description = "Powertools for AWS Lambda (Python) Lambda Layers" authors = ["DevAx <aws-devax-open-source@amazon.com>"] license = "MIT" diff --git a/layer/scripts/layer-balancer/main.go b/layer/scripts/layer-balancer/main.go index cf2f0c1728e..0a800a5c524 100644 --- a/layer/scripts/layer-balancer/main.go +++ b/layer/scripts/layer-balancer/main.go @@ -32,12 +32,12 @@ type LayerInfo struct { var canonicalLayers = []LayerInfo{ { Name: "AWSLambdaPowertoolsPythonV2", - Description: "Lambda Powertools for Python [x86_64] with extra dependencies version bump", + Description: "Powertools for AWS Lambda (Python) [x86_64] with extra dependencies version bump", Architecture: types.ArchitectureX8664, }, { Name:
"AWSLambdaPowertoolsPythonV2-Arm64", - Description: "Lambda Powertools for Python [arm64] with extra dependencies version bump", + Description: "Powertools for AWS Lambda (Python) [arm64] with extra dependencies version bump", Architecture: types.ArchitectureArm64, }, } diff --git a/mkdocs.yml b/mkdocs.yml index 2880881af91..ce383a1b028 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,5 +1,5 @@ -site_name: AWS Lambda Powertools for Python -site_description: AWS Lambda Powertools for Python +site_name: Powertools for AWS Lambda (Python) +site_description: Powertools for AWS Lambda (Python) site_author: Amazon Web Services repo_url: https://github.com/awslabs/aws-lambda-powertools-python edit_uri: edit/develop/docs diff --git a/pyproject.toml b/pyproject.toml index 428dc6ab96b..75e7e4d23a7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "aws_lambda_powertools" version = "2.15.0" -description = "AWS Lambda Powertools is a developer toolkit to implement Serverless best practices and increase developer velocity." +description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] classifiers=[ @@ -114,7 +114,7 @@ branch = true [tool.coverage.html] directory = "test_report" -title = "Lambda Powertools Test Coverage" +title = "Powertools for AWS Lambda (Python) Test Coverage" [tool.coverage.report] fail_under = 90 diff --git a/tests/e2e/idempotency/test_idempotency_dynamodb.py b/tests/e2e/idempotency/test_idempotency_dynamodb.py index 31382ff9050..f82a6c84441 100644 --- a/tests/e2e/idempotency/test_idempotency_dynamodb.py +++ b/tests/e2e/idempotency/test_idempotency_dynamodb.py @@ -35,7 +35,7 @@ def idempotency_table_name(infrastructure: dict) -> str: @pytest.mark.xdist_group(name="idempotency") def test_ttl_caching_expiration_idempotency(ttl_cache_expiration_handler_fn_arn: str): # GIVEN - payload = json.dumps({"message": "Lambda Powertools - TTL 5s"}) + payload = json.dumps({"message": "Powertools for AWS Lambda (Python) - TTL 5s"}) # WHEN # first execution @@ -65,8 +65,8 @@ def test_ttl_caching_expiration_idempotency(ttl_cache_expiration_handler_fn_arn: @pytest.mark.xdist_group(name="idempotency") def test_ttl_caching_timeout_idempotency(ttl_cache_timeout_handler_fn_arn: str): # GIVEN - payload_timeout_execution = json.dumps({"sleep": 5, "message": "Lambda Powertools - TTL 1s"}) - payload_working_execution = json.dumps({"sleep": 0, "message": "Lambda Powertools - TTL 1s"}) + payload_timeout_execution = json.dumps({"sleep": 5, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}) + payload_working_execution = json.dumps({"sleep": 0, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}) # WHEN # first call should fail due to timeout @@ -89,7 +89,7 @@ def test_ttl_caching_timeout_idempotency(ttl_cache_timeout_handler_fn_arn: str): @pytest.mark.xdist_group(name="idempotency") def test_parallel_execution_idempotency(parallel_execution_handler_fn_arn: str): # GIVEN - arguments = json.dumps({"message": "Lambda Powertools - Parallel execution"}) + arguments = json.dumps({"message": "Powertools for AWS Lambda (Python) - Parallel execution"}) # WHEN # executing Lambdas in parallel @@ -107,7 +107,7 @@ def test_parallel_execution_idempotency(parallel_execution_handler_fn_arn: str): @pytest.mark.xdist_group(name="idempotency") def 
test_idempotent_function_thread_safety(function_thread_safety_handler_fn_arn: str): # GIVEN - payload = json.dumps({"message": "Lambda Powertools - Idempotent function thread safety check"}) + payload = json.dumps({"message": "Powertools for AWS Lambda (Python) - Idempotent function thread safety check"}) # WHEN # first execution diff --git a/tests/e2e/parameters/infrastructure.py b/tests/e2e/parameters/infrastructure.py index 58065ea9848..db76a68def3 100644 --- a/tests/e2e/parameters/infrastructure.py +++ b/tests/e2e/parameters/infrastructure.py @@ -40,7 +40,7 @@ def _create_app_config(self, function: Function): self.stack, id="appconfig-app", name=f"powertools-e2e-{service_name}", - description="Lambda Powertools End-to-End testing for AppConfig", + description="Powertools for AWS Lambda (Python) End-to-End testing for AppConfig", ) CfnOutput(self.stack, "AppConfigApplication", value=cfn_application.name) @@ -49,7 +49,7 @@ def _create_app_config(self, function: Function): "appconfig-env", application_id=cfn_application.ref, name=f"powertools-e2e{service_name}", - description="Lambda Powertools End-to-End testing environment", + description="Powertools for AWS Lambda (Python) End-to-End testing environment", ) CfnOutput(self.stack, "AppConfigEnvironment", value=cfn_environment.name) diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index 79fcee9290b..c81bd1bca7d 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -31,7 +31,7 @@ def __init__( filter_expression: Optional[str] = None, minimum_log_entries: int = 1, ): - """Fetch and expose Powertools Logger logs from CloudWatch Logs + """Fetch and expose Powertools for AWS Lambda (Python) Logger logs from CloudWatch Logs Parameters ---------- diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index 23d0537e533..358a8dd76a1 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -1058,7 +1058,7 @@ def test_custom_jmespath_function_overrides_builtin_functions( config_with_jmespath_options: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer ): # GIVEN a persistence store with a custom jmespath_options - # AND use a builtin powertools custom function + # AND use a builtin Powertools for AWS Lambda (Python) custom function persistence_store.configure(config_with_jmespath_options) with pytest.raises(jmespath.exceptions.UnknownFunctionError, match="Unknown function: powertools_json()"): diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index e6dbf7ebbb8..f732e41e533 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -902,7 +902,7 @@ def test_set_package_logger_handler_with_powertools_debug_env_var(stdout, monkey logger = logging.getLogger("aws_lambda_powertools") # WHEN set_package_logger is used at initialization - # and any Powertools operation is used (e.g., Tracer) + # and any Powertools for AWS Lambda (Python) operation is used (e.g., Tracer) set_package_logger_handler(stream=stdout) Tracer(disabled=True) diff --git a/tests/functional/test_logger_utils.py b/tests/functional/test_logger_utils.py index 0e0c7fc7766..23796b74e6c 100644 --- a/tests/functional/test_logger_utils.py +++ b/tests/functional/test_logger_utils.py @@ -50,13 +50,13 @@ def service_name(): def test_copy_config_to_ext_loggers(stdout, logger, log_level): - # GIVEN two external loggers and powertools 
logger initialized + # GIVEN two external loggers and Powertools for AWS Lambda (Python) logger initialized logger_1 = logger() logger_2 = logger() powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to ALL external loggers + # WHEN configuration copied from Powertools for AWS Lambda (Python) logger to ALL external loggers # AND external loggers used utils.copy_config_to_registered_loggers(source_logger=powertools_logger) msg = "test message1" @@ -64,7 +64,7 @@ def test_copy_config_to_ext_loggers(stdout, logger, log_level): logger_2.info(msg) logs = capture_multiple_logging_statements_output(stdout) - # THEN all external loggers used Powertools handler, formatter and log level + # THEN all external loggers used Powertools for AWS Lambda (Python) handler, formatter and log level for index, logger in enumerate([logger_1, logger_2]): assert len(logger.handlers) == 1 assert isinstance(logger.handlers[0], logging.StreamHandler) @@ -75,18 +75,18 @@ def test_copy_config_to_ext_loggers(stdout, logger, log_level): def test_copy_config_to_ext_loggers_include(stdout, logger, log_level): - # GIVEN an external logger and powertools logger initialized + # GIVEN an external logger and Powertools for AWS Lambda (Python) logger initialized logger = logger() powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to INCLUDED external loggers + # WHEN configuration copied from Powertools for AWS Lambda (Python) logger to INCLUDED external loggers # AND our external logger used utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={logger.name}) msg = "test message2" logger.info(msg) log = capture_logging_output(stdout) - # THEN included external loggers used Powertools handler, formatter and log level. + # THEN included external loggers used Powertools for AWS Lambda (Python) handler, formatter and log level. 
    assert len(logger.handlers) == 1
     assert isinstance(logger.handlers[0], logging.StreamHandler)
     assert isinstance(logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter)
@@ -96,11 +96,12 @@ def test_copy_config_to_ext_loggers_include(stdout, logger, log_level):


 def test_copy_config_to_ext_loggers_wrong_include(stdout, logger, log_level):
-    # GIVEN an external logger and powertools logger initialized
+    # GIVEN an external logger and Powertools for AWS Lambda (Python) logger initialized
     logger = logger()
     powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout)

-    # WHEN configuration copied from powertools logger to INCLUDED NON EXISTING external loggers
+    # WHEN configuration copied from Powertools for AWS Lambda (Python) logger
+    # to INCLUDED NON EXISTING external loggers
     utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={"non-existing-logger"})

     # THEN existing external logger is not modified
@@ -108,11 +109,11 @@ def test_copy_config_to_ext_loggers_wrong_include(stdout, logger, log_level):


 def test_copy_config_to_ext_loggers_exclude(stdout, logger, log_level):
-    # GIVEN an external logger and powertools logger initialized
+    # GIVEN an external logger and Powertools for AWS Lambda (Python) logger initialized
     logger = logger()
     powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout)

-    # WHEN configuration copied from powertools logger to ALL BUT external logger
+    # WHEN configuration copied from Powertools for AWS Lambda (Python) logger to ALL BUT external logger
     utils.copy_config_to_registered_loggers(source_logger=powertools_logger, exclude={logger.name})

     # THEN external logger is not modified
@@ -120,13 +121,13 @@ def test_copy_config_to_ext_loggers_exclude(stdout, logger, log_level):


 def test_copy_config_to_ext_loggers_include_exclude(stdout, logger, log_level):
-    # GIVEN two external loggers and powertools logger initialized
+    # GIVEN two external loggers and Powertools for AWS Lambda (Python) logger initialized
     logger_1 = logger()
     logger_2 = logger()

     powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout)

-    # WHEN configuration copied from powertools logger to INCLUDED external loggers
+    # WHEN configuration copied from Powertools for AWS Lambda (Python) logger to INCLUDED external loggers
     # AND external logger_1 is also in EXCLUDE list
     utils.copy_config_to_registered_loggers(
         source_logger=powertools_logger, include={logger_1.name, logger_2.name}, exclude={logger_1.name}
@@ -135,7 +136,8 @@ def test_copy_config_to_ext_loggers_include_exclude(stdout, logger, log_level):
     logger_2.info(msg)
     log = capture_logging_output(stdout)

-    # THEN logger_1 is not modified and Logger_2 used Powertools handler, formatter and log level
+    # THEN logger_1 is not modified and Logger_2 used Powertools for AWS Lambda (Python) handler, formatter and log
+    # level
     assert not logger_1.handlers
     assert len(logger_2.handlers) == 1
     assert isinstance(logger_2.handlers[0], logging.StreamHandler)
@@ -146,16 +148,16 @@ def test_copy_config_to_ext_loggers_include_exclude(stdout, logger, log_level):


 def test_copy_config_to_ext_loggers_clean_old_handlers(stdout, logger, log_level):
-    # GIVEN an external logger with handler and powertools logger initialized
+    # GIVEN an external logger with handler and Powertools for AWS Lambda (Python) logger initialized
     logger = logger()
     handler = logging.NullHandler()
logger.addHandler(handler) powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to ALL external loggers + # WHEN configuration copied from Powertools for AWS Lambda (Python) logger to ALL external loggers utils.copy_config_to_registered_loggers(source_logger=powertools_logger) - # THEN old logger's handler removed and Powertools configuration used instead + # THEN old logger's handler removed and Powertools for AWS Lambda (Python) configuration used instead assert len(logger.handlers) == 1 assert isinstance(logger.handlers[0], logging.StreamHandler) assert isinstance(logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter) @@ -163,11 +165,11 @@ def test_copy_config_to_ext_loggers_clean_old_handlers(stdout, logger, log_level @pytest.mark.parametrize("level_to_set", ["WARNING", 30]) def test_copy_config_to_ext_loggers_custom_log_level(stdout, logger, log_level, level_to_set): - # GIVEN an external logger and powertools logger initialized + # GIVEN an external logger and Powertools for AWS Lambda (Python) logger initialized logger = logger() powertools_logger = Logger(service=service_name(), level=log_level.CRITICAL.value, stream=stdout) - # WHEN configuration copied from powertools logger to INCLUDED external logger + # WHEN configuration copied from Powertools for AWS Lambda (Python) logger to INCLUDED external logger # AND external logger used with custom log_level utils.copy_config_to_registered_loggers( source_logger=powertools_logger, include={logger.name}, log_level=level_to_set @@ -176,7 +178,7 @@ def test_copy_config_to_ext_loggers_custom_log_level(stdout, logger, log_level, logger.warning(msg) log = capture_logging_output(stdout) - # THEN external logger used Powertools handler, formatter and CUSTOM log level. + # THEN external logger used Powertools for AWS Lambda (Python) handler, formatter and CUSTOM log level. 
    assert len(logger.handlers) == 1
     assert isinstance(logger.handlers[0], logging.StreamHandler)
     assert isinstance(logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter)
@@ -187,10 +189,10 @@ def test_copy_config_to_ext_loggers_custom_log_level(stdout, logger, log_level,


 def test_copy_config_to_ext_loggers_should_not_break_append_keys(stdout, log_level):
-    # GIVEN powertools logger initialized
+    # GIVEN Powertools for AWS Lambda (Python) logger initialized
     powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout)

-    # WHEN configuration copied from powertools logger to ALL external loggers
+    # WHEN configuration copied from Powertools for AWS Lambda (Python) logger to ALL external loggers
     utils.copy_config_to_registered_loggers(source_logger=powertools_logger)

     # THEN append_keys should not raise an exception
@@ -198,8 +200,8 @@ def test_copy_config_to_ext_loggers_should_not_break_append_keys(stdout, log_lev


 def test_copy_config_to_parent_loggers_only(stdout):
-    # GIVEN Powertools Logger and Child Logger are initialized
-    # and Powertools Logger config is copied over
+    # GIVEN Powertools for AWS Lambda (Python) Logger and Child Logger are initialized
+    # and Powertools for AWS Lambda (Python) Logger config is copied over
     service = service_name()
     child = Logger(stream=stdout, service=service, child=True)
     parent = Logger(stream=stdout, service=service)
@@ -220,8 +222,8 @@ def test_copy_config_to_parent_loggers_only(stdout):


 def test_copy_config_to_parent_loggers_only_with_exclude(stdout):
-    # GIVEN Powertools Logger and Child Logger are initialized
-    # and Powertools Logger config is copied over with exclude set
+    # GIVEN Powertools for AWS Lambda (Python) Logger and Child Logger are initialized
+    # and Powertools for AWS Lambda (Python) Logger config is copied over with exclude set
     service = service_name()
     child = Logger(stream=stdout, service=service, child=True)
     parent = Logger(stream=stdout, service=service)
@@ -242,7 +244,7 @@ def test_copy_config_to_parent_loggers_only_with_exclude(stdout):


 def test_copy_config_to_ext_loggers_no_duplicate_logs(stdout, logger, log_level):
-    # GIVEN an root logger, external logger and powertools logger initialized
+    # GIVEN a root logger, external logger and Powertools for AWS Lambda (Python) logger initialized
     root_logger = logging.getLogger()

     handler = logging.StreamHandler(stdout)
@@ -255,7 +257,7 @@ def test_copy_config_to_ext_loggers_no_duplicate_logs(stdout, logger, log_level)
     powertools_logger = Logger(service=service_name(), level=log_level.CRITICAL.value, stream=stdout)
     level = log_level.WARNING.name

-    # WHEN configuration copied from powertools logger
+    # WHEN configuration copied from Powertools for AWS Lambda (Python) logger
     # AND external logger used with custom log_level
     utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={logger.name}, log_level=level)
     msg = "test message4"
@@ -268,14 +270,14 @@ def test_copy_config_to_ext_loggers_no_duplicate_logs(stdout, logger, log_level)


 def test_logger_name_is_included_during_copy(stdout, logger, log_level):
-    # GIVEN two external loggers and powertools logger initialized
+    # GIVEN two external loggers and Powertools for AWS Lambda (Python) logger initialized
     logger_1: logging.Logger = logger()
     logger_2: logging.Logger = logger()
     msg = "test message1"

     powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout)

-    # WHEN configuration copied from powertools logger to ALL external loggers
+    # WHEN
configuration copied from Powertools for AWS Lambda (Python) logger to ALL external loggers # AND external loggers used utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={logger_1.name, logger_2.name}) logger_1.info(msg) From 9e0f15cf51d4c26cf3df21dc3a982fd07c1bb364 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 31 May 2023 10:08:56 +0200 Subject: [PATCH 70/76] chore(deps): bump typing-extensions from 4.5.0 to 4.6.2 (#2345) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index a58279dc584..9566e042ace 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2877,14 +2877,14 @@ files = [ [[package]] name = "typing-extensions" -version = "4.5.0" +version = "4.6.2" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, + {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, ] [[package]] @@ -3087,4 +3087,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "f916179bcebcb9eb8b8e787550f39b750aa673ff28c511041048fd939d44eef2" +content-hash = "04524bd804a7a4b8343c67fb59b96bdc11ada96de02123e37bd16b3f0db82044" diff --git a/pyproject.toml b/pyproject.toml index 75e7e4d23a7..81fc80567cd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ aws-xray-sdk = { version = "^2.8.0", optional = true } fastjsonschema = { version = "^2.14.5", optional = true } pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } -typing-extensions = "^4.4.0" +typing-extensions = "^4.6.2" [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^7.2"} @@ -81,7 +81,7 @@ mypy-boto3-ssm = "^1.26.97" mypy-boto3-s3 = "^1.26.127" mypy-boto3-xray = "^1.26.122" types-requests = "^2.31.0" -typing-extensions = "^4.4.0" +typing-extensions = "^4.6.2" mkdocs-material = "^9.1.15" filelock = "^3.12.0" checksumdir = "^1.2.0" From 03e64b1b0a1a5cf4b133d8e36da9f184aaca985f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 31 May 2023 22:00:27 +0100 Subject: [PATCH 71/76] chore(deps): bump aws-actions/configure-aws-credentials from 2.0.0 to 2.1.0 (#2350) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/dispatch_analytics.yml | 2 +- .github/workflows/reusable_deploy_v2_layer_stack.yml | 2 +- .github/workflows/reusable_deploy_v2_sar.yml | 4 ++-- .github/workflows/reusable_publish_docs.yml | 2 +- .github/workflows/run-e2e-tests.yml | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/dispatch_analytics.yml b/.github/workflows/dispatch_analytics.yml index d307ef62326..2f7c2f5c8a3 100644 --- a/.github/workflows/dispatch_analytics.yml +++ 
b/.github/workflows/dispatch_analytics.yml @@ -30,7 +30,7 @@ jobs: environment: analytics steps: - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f # v2.1.0 with: aws-region: eu-central-1 role-to-assume: ${{ secrets.AWS_ANALYTICS_ROLE_ARN }} diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml index 58e4a26f75f..425b8a092f3 100644 --- a/.github/workflows/reusable_deploy_v2_layer_stack.yml +++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml @@ -99,7 +99,7 @@ jobs: - name: Install poetry run: pipx install poetry - name: aws credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f # v2.1.0 with: aws-region: ${{ matrix.region }} role-to-assume: ${{ secrets.AWS_LAYERS_ROLE_ARN }} diff --git a/.github/workflows/reusable_deploy_v2_sar.yml b/.github/workflows/reusable_deploy_v2_sar.yml index 4ca28543f24..d9e2f046e9a 100644 --- a/.github/workflows/reusable_deploy_v2_sar.yml +++ b/.github/workflows/reusable_deploy_v2_sar.yml @@ -50,12 +50,12 @@ jobs: - name: Checkout uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f # v2.1.0 with: aws-region: ${{ env.AWS_REGION }} role-to-assume: ${{ secrets.AWS_LAYERS_ROLE_ARN }} - name: AWS credentials SAR role - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f # v2.1.0 id: aws-credentials-sar-role with: aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml index fdc87524fac..c5d4fd2dfcc 100644 --- a/.github/workflows/reusable_publish_docs.yml +++ b/.github/workflows/reusable_publish_docs.yml @@ -91,7 +91,7 @@ jobs: keep_files: true destination_dir: latest/api - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f with: aws-region: us-east-1 role-to-assume: ${{ secrets.AWS_DOCS_ROLE_ARN }} diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 2b4f7c05459..dbdc9b1849a 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -54,7 +54,7 @@ jobs: - name: Install dependencies run: make dev - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0 + uses: aws-actions/configure-aws-credentials@5727f247b64f324ec403ac56ae05e220fd02b65f # v2.1.0 with: role-to-assume: ${{ secrets.AWS_TEST_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} From 5a365047975fd7ea32e32d6d69d9b6ff5aa8b6f8 Mon Sep 17 00:00:00 2001 From: Roger Zhang <roger.zhang.cs@gmail.com> Date: Thu, 1 Jun 2023 01:53:37 -0700 Subject: [PATCH 72/76] feat(user-agent): add custom header User-Agent to AWS SDK requests (#2267) Co-authored-by: Leandro Damascena <leandro.damascena@gmail.com> --- 
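Notes: a minimal usage sketch of the feature-tagging helpers this patch introduces. It
assumes a plain boto3 client created by the caller; the "streaming" feature string
mirrors how the built-in utilities below tag themselves, and the resulting header
value is illustrative only (Powertools version and execution environment vary by
runtime):

    import boto3

    from aws_lambda_powertools.shared.user_agent import register_feature_to_client

    # Tag an SDK client so its requests identify the Powertools feature using it
    s3 = boto3.client("s3")
    register_feature_to_client(client=s3, feature="streaming")

    # Subsequent SDK calls carry the feature in the User-Agent header, e.g.:
    #   User-Agent: Boto3/1.26.x ... PT/streaming/2.15.0 PTEnv/AWS_Lambda_python3.10
    s3.list_buckets()
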
 aws_lambda_powertools/__init__.py           |  13 +-
 aws_lambda_powertools/shared/user_agent.py  | 165 ++++++++++++++++++
 aws_lambda_powertools/shared/version.py     |  16 ++
 .../data_classes/code_pipeline_job_event.py |   5 +-
 .../idempotency/persistence/dynamodb.py     |   4 +-
 .../utilities/parameters/base.py            |  12 +-
 .../utilities/streaming/_s3_seekable_io.py  |   5 +
 mypy.ini                                    |   3 +-
 pyproject.toml                              |   2 +-
 9 files changed, 214 insertions(+), 11 deletions(-)
 create mode 100644 aws_lambda_powertools/shared/user_agent.py
 create mode 100644 aws_lambda_powertools/shared/version.py

diff --git a/aws_lambda_powertools/__init__.py b/aws_lambda_powertools/__init__.py
index 574c9b257f1..14237bc7119 100644
--- a/aws_lambda_powertools/__init__.py
+++ b/aws_lambda_powertools/__init__.py
@@ -4,11 +4,14 @@

 from pathlib import Path

-from .logging import Logger
-from .metrics import Metrics, single_metric
-from .package_logger import set_package_logger_handler
-from .tracing import Tracer
+from aws_lambda_powertools.logging import Logger
+from aws_lambda_powertools.metrics import Metrics, single_metric
+from aws_lambda_powertools.package_logger import set_package_logger_handler
+from aws_lambda_powertools.shared.user_agent import inject_user_agent
+from aws_lambda_powertools.shared.version import VERSION
+from aws_lambda_powertools.tracing import Tracer

+__version__ = VERSION
 __author__ = """Amazon Web Services"""
 __all__ = [
     "Logger",
@@ -20,3 +23,5 @@
 PACKAGE_PATH = Path(__file__).parent

 set_package_logger_handler()
+
+inject_user_agent()
diff --git a/aws_lambda_powertools/shared/user_agent.py b/aws_lambda_powertools/shared/user_agent.py
new file mode 100644
index 00000000000..62cdc16601d
--- /dev/null
+++ b/aws_lambda_powertools/shared/user_agent.py
@@ -0,0 +1,165 @@
+import logging
+import os
+
+from aws_lambda_powertools.shared.version import VERSION
+
+powertools_version = VERSION
+inject_header = True
+
+try:
+    import botocore
+except ImportError:
+    # if botocore failed to import, the user might be using a custom runtime and we can't inject the header
+    inject_header = False
+
+logger = logging.getLogger(__name__)
+
+EXEC_ENV = os.environ.get("AWS_EXECUTION_ENV", "NA")
+TARGET_SDK_EVENT = "request-created"
+FEATURE_PREFIX = "PT"
+DEFAULT_FEATURE = "no-op"
+HEADER_NO_OP = f"{FEATURE_PREFIX}/{DEFAULT_FEATURE}/{powertools_version} PTEnv/{EXEC_ENV}"
+
+
+def _initializer_botocore_session(session):
+    """
+    This function is used to add an extra header for the User-Agent in the Botocore session,
+    as described in the pull request: https://github.com/boto/botocore/pull/2682
+
+    Parameters
+    ----------
+    session : botocore.session.Session
+        The Botocore session to which the user-agent function will be registered.
+
+    Raises
+    ------
+    Exception
+        If there is an issue while adding the extra header for the User-Agent.
+
+    """
+    try:
+        session.register(TARGET_SDK_EVENT, _create_feature_function(DEFAULT_FEATURE))
+    except Exception:
+        logger.debug("Can't add extra User-Agent header")
+
+
+def _create_feature_function(feature):
+    """
+    Create and return the `add_powertools_feature` function.
+
+    The `add_powertools_feature` function is designed to be registered in boto3's event system.
+    When registered, it appends the given feature string to the User-Agent header of AWS SDK requests.
+
+    Parameters
+    ----------
+    feature : str
+        The feature string to be appended to the User-Agent header.
+
+    Returns
+    -------
+    add_powertools_feature : Callable
+        The `add_powertools_feature` function that modifies the User-Agent header.
+
+
+    """
+
+    def add_powertools_feature(request, **kwargs):
+        try:
+            headers = request.headers
+            header_user_agent = (
+                f"{headers['User-Agent']} {FEATURE_PREFIX}/{feature}/{powertools_version} PTEnv/{EXEC_ENV}"
+            )
+
+            # This function is exclusive to client and resource objects created in Powertools
+            # and must remove the no-op header, if present
+            if HEADER_NO_OP in headers["User-Agent"] and feature != DEFAULT_FEATURE:
+                # Remove HEADER_NO_OP + space
+                header_user_agent = header_user_agent.replace(f"{HEADER_NO_OP} ", "")
+
+            headers["User-Agent"] = f"{header_user_agent}"
+        except Exception:
+            logger.debug("Can't find User-Agent header")
+
+    return add_powertools_feature
+
+
+# Add feature user-agent to given sdk boto3.session
+def register_feature_to_session(session, feature):
+    """
+    Register the given feature string to the event system of the provided boto3 session
+    and append the feature to the User-Agent header of the request
+
+    Parameters
+    ----------
+    session : boto3.session.Session
+        The boto3 session to which the feature will be registered.
+    feature : str
+        The feature string to be appended to the User-Agent header, e.g., "streaming" in Powertools.
+
+    Raises
+    ------
+    AttributeError
+        If the provided session does not have an event system.
+
+    """
+    try:
+        session.events.register(TARGET_SDK_EVENT, _create_feature_function(feature))
+    except AttributeError as e:
+        logger.debug(f"session passed in doesn't have an event system: {e}")
+
+
+# Add feature user-agent to given sdk boto3.client
+def register_feature_to_client(client, feature):
+    """
+    Register the given feature string to the event system of the provided boto3 client
+    and append the feature to the User-Agent header of the request
+
+    Parameters
+    ----------
+    client : boto3.session.Session.client
+        The boto3 client to which the feature will be registered.
+    feature : str
+        The feature string to be appended to the User-Agent header, e.g., "streaming" in Powertools.
+
+    Raises
+    ------
+    AttributeError
+        If the provided client does not have an event system.
+
+    """
+    try:
+        client.meta.events.register(TARGET_SDK_EVENT, _create_feature_function(feature))
+    except AttributeError as e:
+        logger.debug(f"client passed in doesn't have an event system: {e}")
+
+
+# Add feature user-agent to given sdk boto3.resource
+def register_feature_to_resource(resource, feature):
+    """
+    Register the given feature string to the event system of the provided boto3 resource
+    and append the feature to the User-Agent header of the request
+
+    Parameters
+    ----------
+    resource : boto3.session.Session.resource
+        The boto3 resource to which the feature will be registered.
+    feature : str
+        The feature string to be appended to the User-Agent header, e.g., "streaming" in Powertools.
+
+    Raises
+    ------
+    AttributeError
+        If the provided resource does not have an event system.
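+
+    Example
+    -------
+    A minimal sketch, assuming a default boto3 session; the "streaming" feature
+    string mirrors how the streaming utility tags itself:
+
+        import boto3
+
+        s3 = boto3.resource("s3")
+        register_feature_to_resource(resource=s3, feature="streaming")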
+
+    """
+    try:
+        resource.meta.client.meta.events.register(TARGET_SDK_EVENT, _create_feature_function(feature))
+    except AttributeError as e:
+        logger.debug(f"resource passed in doesn't have an event system: {e}")
+
+
+def inject_user_agent():
+    if inject_header:
+        # Customize botocore session to inject Powertools header
+        # See: https://github.com/boto/botocore/pull/2682
+        botocore.register_initializer(_initializer_botocore_session)
diff --git a/aws_lambda_powertools/shared/version.py b/aws_lambda_powertools/shared/version.py
new file mode 100644
index 00000000000..0db71627089
--- /dev/null
+++ b/aws_lambda_powertools/shared/version.py
@@ -0,0 +1,16 @@
+"""
+    This file serves to create a constant that holds the current version
+    of the Powertools package and exposes it in the main module
+
+    Since Python 3.8 there is the built-in importlib.metadata
+    When support for Python 3.7 is dropped, we can remove the optional importlib_metadata dependency
+    See: https://docs.python.org/3/library/importlib.metadata.html
+"""
+import sys
+
+if sys.version_info >= (3, 8):
+    from importlib.metadata import version
+else:
+    from importlib_metadata import version
+
+VERSION = version("aws-lambda-powertools")
diff --git a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
index c502aacb090..434df509deb 100644
--- a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
@@ -6,6 +6,7 @@

 import boto3

+from aws_lambda_powertools.shared import user_agent
 from aws_lambda_powertools.utilities.data_classes.common import DictWrapper


@@ -203,12 +204,14 @@ def setup_s3_client(self):
         BaseClient
             An S3 client with the appropriate credentials
         """
-        return boto3.client(
+        s3 = boto3.client(
             "s3",
             aws_access_key_id=self.data.artifact_credentials.access_key_id,
             aws_secret_access_key=self.data.artifact_credentials.secret_access_key,
             aws_session_token=self.data.artifact_credentials.session_token,
         )
+        user_agent.register_feature_to_client(client=s3, feature="data_classes")
+        return s3

     def find_input_artifact(self, artifact_name: str) -> Optional[CodePipelineArtifact]:
         """Find an input artifact by artifact name
diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
index 654f8ca99d4..d69e42f9287 100644
--- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
+++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
@@ -10,7 +10,7 @@
 from botocore.config import Config
 from botocore.exceptions import ClientError

-from aws_lambda_powertools.shared import constants
+from aws_lambda_powertools.shared import constants, user_agent
 from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer
 from aws_lambda_powertools.utilities.idempotency.exceptions import (
     IdempotencyItemAlreadyExistsError,
@@ -94,6 +94,8 @@ def __init__(
         else:
             self.client = boto3_client

+        user_agent.register_feature_to_client(client=self.client, feature="idempotency")
+
         if sort_key_attr == key_attr:
             raise ValueError(f"key_attr [{key_attr}] and sort_key_attr [{sort_key_attr}] cannot be the same!")

diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py
index 8ec1052ae37..4357b5d520e 100644
--- a/aws_lambda_powertools/utilities/parameters/base.py
+++
b/aws_lambda_powertools/utilities/parameters/base.py @@ -25,7 +25,7 @@ import boto3 from botocore.config import Config -from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared import constants, user_agent from aws_lambda_powertools.shared.functions import resolve_max_age from aws_lambda_powertools.utilities.parameters.types import TransformOptions @@ -254,11 +254,14 @@ def _build_boto3_client( Instance of a boto3 client for Parameters feature (e.g., ssm, appconfig, secretsmanager, etc.) """ if client is not None: + user_agent.register_feature_to_client(client=client, feature="parameters") return client session = session or boto3.Session() config = config or Config() - return session.client(service_name=service_name, config=config) + client = session.client(service_name=service_name, config=config) + user_agent.register_feature_to_client(client=client, feature="parameters") + return client # maintenance: change DynamoDBServiceResource type to ParameterResourceClients when we expand @staticmethod @@ -288,11 +291,14 @@ def _build_boto3_resource_client( Instance of a boto3 resource client for Parameters feature (e.g., dynamodb, etc.) """ if client is not None: + user_agent.register_feature_to_resource(resource=client, feature="parameters") return client session = session or boto3.Session() config = config or Config() - return session.resource(service_name=service_name, config=config, endpoint_url=endpoint_url) + client = session.resource(service_name=service_name, config=config, endpoint_url=endpoint_url) + user_agent.register_feature_to_resource(resource=client, feature="parameters") + return client def get_transform_method(value: str, transform: TransformOptions = None) -> Callable[..., Any]: diff --git a/aws_lambda_powertools/utilities/streaming/_s3_seekable_io.py b/aws_lambda_powertools/utilities/streaming/_s3_seekable_io.py index 8e280f3f7d7..de9b77410a3 100644 --- a/aws_lambda_powertools/utilities/streaming/_s3_seekable_io.py +++ b/aws_lambda_powertools/utilities/streaming/_s3_seekable_io.py @@ -15,6 +15,7 @@ import boto3 +from aws_lambda_powertools.shared import user_agent from aws_lambda_powertools.utilities.streaming.compat import PowertoolsStreamingBody if TYPE_CHECKING: @@ -67,6 +68,7 @@ def __init__( self._sdk_options = sdk_options self._sdk_options["Bucket"] = bucket self._sdk_options["Key"] = key + self._has_user_agent = False if version_id is not None: self._sdk_options["VersionId"] = version_id @@ -77,6 +79,9 @@ def s3_client(self) -> "Client": """ if self._s3_client is None: self._s3_client = boto3.client("s3") + if not self._has_user_agent: + user_agent.register_feature_to_client(client=self._s3_client, feature="streaming") + self._has_user_agent = True return self._s3_client @property diff --git a/mypy.ini b/mypy.ini index 4af89217fdc..2b50293b561 100644 --- a/mypy.ini +++ b/mypy.ini @@ -63,4 +63,5 @@ ignore_missing_imports = True [mypy-ijson] ignore_missing_imports = True - +[mypy-importlib.metadata] +ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml index 81fc80567cd..07fd2c28f97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ aws-xray-sdk = { version = "^2.8.0", optional = true } fastjsonschema = { version = "^2.14.5", optional = true } pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } +importlib-metadata = {version = "^6.6.0", python = "<3.8"} typing-extensions = "^4.6.2" [tool.poetry.dev-dependencies] @@ -86,7 +87,6 @@ mkdocs-material = "^9.1.15" 
filelock = "^3.12.0"
checksumdir = "^1.2.0"
mypy-boto3-appconfigdata = "^1.26.70"
-importlib-metadata = "^6.6"
ijson = "^3.2.0"
typed-ast = { version = "^1.5.4", python = "< 3.8"}
hvac = "^1.1.0"

From 719cdc0e3e9cf09da7592bc6a393554c1acd6349 Mon Sep 17 00:00:00 2001
From: Darnley Costa <darnley@outlook.com>
Date: Thu, 1 Jun 2023 07:20:52 -0300
Subject: [PATCH 73/76] fix(event_source): change the import location of boto3
 in CodePipelineJobEvent data class (#2353)

Co-authored-by: Leandro Damascena <leandro.damascena@gmail.com>
---
 .../utilities/data_classes/code_pipeline_job_event.py | 9 ++++++---
 docs/core/logger.md                                   | 1 +
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
index 434df509deb..96c209e0eca 100644
--- a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
@@ -4,9 +4,6 @@
 from typing import Any, Dict, List, Optional
 from urllib.parse import unquote_plus

-import boto3
-
-from aws_lambda_powertools.shared import user_agent
 from aws_lambda_powertools.utilities.data_classes.common import DictWrapper

@@ -204,6 +201,12 @@ def setup_s3_client(self):
         BaseClient
             An S3 client with the appropriate credentials
         """
+        # Import boto3 within the function and not at the top level so we only
+        # pay the import cost when it is explicitly needed, for better performance.
+        import boto3
+
+        from aws_lambda_powertools.shared import user_agent
+
         s3 = boto3.client(
             "s3",
             aws_access_key_id=self.data.artifact_credentials.access_key_id,
diff --git a/docs/core/logger.md b/docs/core/logger.md
index 064328dabb0..305a7cef0f3 100644
--- a/docs/core/logger.md
+++ b/docs/core/logger.md
@@ -741,6 +741,7 @@ Here's an example where we persist `payment_id` not `request_id`. Note that `pay
 ---8<-- "examples/logger/src/append_keys_vs_extra_output.json"
 ```

+<!-- markdownlint-disable MD013 -->
 ### How do I aggregate and search Powertools for AWS Lambda (Python) logs across accounts?

 As of now, ElasticSearch (ELK) or 3rd party solutions are best suited to this task. Please refer to this [discussion for more details](https://github.com/awslabs/aws-lambda-powertools-python/issues/460)

From 0b4f866045646a6ff0015c8e36966ea357641787 Mon Sep 17 00:00:00 2001
From: Heitor Lessa <lessa@amazon.co.uk>
Date: Thu, 1 Jun 2023 12:40:47 +0200
Subject: [PATCH 74/76] refactor(logger): remove subclassing and move
 unnecessary APIs (#2334)

Co-authored-by: Leandro Damascena <leandro.damascena@gmail.com>
---
 aws_lambda_powertools/logging/compat.py |  51 +++++++++
 aws_lambda_powertools/logging/logger.py | 145 ++++++++----------------
 aws_lambda_powertools/logging/utils.py  |   2 +-
 3 files changed, 102 insertions(+), 96 deletions(-)
 create mode 100644 aws_lambda_powertools/logging/compat.py

diff --git a/aws_lambda_powertools/logging/compat.py b/aws_lambda_powertools/logging/compat.py
new file mode 100644
index 00000000000..5e1dbd72e28
--- /dev/null
+++ b/aws_lambda_powertools/logging/compat.py
@@ -0,0 +1,51 @@
+"""Maintenance: We can drop this upon Py3.7 EOL. It's a backport needed for the "location" key to work."""
+from __future__ import annotations
+
+import io
+import logging
+import os
+import traceback
+
+
+def findCaller(stack_info=False, stacklevel=2):  # pragma: no cover
+    """
+    Find the stack frame of the caller so that we can note the source
+    file name, line number and function name.
+ """ + f = logging.currentframe() # noqa: VNE001 + # On some versions of IronPython, currentframe() returns None if + # IronPython isn't run with -X:Frames. + if f is None: + return "(unknown file)", 0, "(unknown function)", None + while stacklevel > 0: + next_f = f.f_back + if next_f is None: + ## We've got options here. + ## If we want to use the last (deepest) frame: + break + ## If we want to mimic the warnings module: + # return ("sys", 1, "(unknown function)", None) # noqa: E800 + ## If we want to be pedantic: # noqa: E800 + # raise ValueError("call stack is not deep enough") # noqa: E800 + f = next_f # noqa: VNE001 + if not _is_internal_frame(f): + stacklevel -= 1 + co = f.f_code + sinfo = None + if stack_info: + with io.StringIO() as sio: + sio.write("Stack (most recent call last):\n") + traceback.print_stack(f, file=sio) + sinfo = sio.getvalue() + if sinfo[-1] == "\n": + sinfo = sinfo[:-1] + return co.co_filename, f.f_lineno, co.co_name, sinfo + + +# The following is based on warnings._is_internal_frame. It makes sure that +# frames of the import mechanism are skipped when logging at module level and +# using a stacklevel value greater than one. +def _is_internal_frame(frame): # pragma: no cover + """Signal whether the frame is a CPython or logging module internal.""" + filename = os.path.normcase(frame.f_code.co_filename) + return filename == logging._srcfile or ("importlib" in filename and "_bootstrap" in filename) diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 201cb942fdd..a529510ba01 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -2,12 +2,10 @@ import functools import inspect -import io import logging import os import random import sys -import traceback from typing import ( IO, TYPE_CHECKING, @@ -25,6 +23,8 @@ import jmespath +from aws_lambda_powertools.logging import compat + from ..shared import constants from ..shared.functions import ( extract_event_from_common_models, @@ -66,12 +66,7 @@ def _is_cold_start() -> bool: return cold_start -# PyCharm does not support autocomplete via getattr -# so we need to return to subclassing removed in #97 -# All methods/properties continue to be proxied to inner logger -# https://github.com/awslabs/aws-lambda-powertools-python/issues/107 -# noinspection PyRedeclaration -class Logger(logging.Logger): # lgtm [py/missing-call-to-init] +class Logger: """Creates and setups a logger to format statements in JSON. 
Includes service name and any additional key=value into logs @@ -238,7 +233,6 @@ def __init__( self.logger_handler = logger_handler or logging.StreamHandler(stream) self.log_uncaught_exceptions = log_uncaught_exceptions - self.log_level = self._get_log_level(level) self._is_deduplication_disabled = resolve_truthy_env_var_choice( env=os.getenv(constants.LOGGER_LOG_DEDUPLICATION_ENV, "false") ) @@ -258,7 +252,7 @@ def __init__( "use_rfc3339": use_rfc3339, } - self._init_logger(formatter_options=formatter_options, **kwargs) + self._init_logger(formatter_options=formatter_options, log_level=level, **kwargs) if self.log_uncaught_exceptions: logger.debug("Replacing exception hook") @@ -277,11 +271,11 @@ def _get_logger(self): """Returns a Logger named {self.service}, or {self.service.filename} for child loggers""" logger_name = self.service if self.child: - logger_name = f"{self.service}.{self._get_caller_filename()}" + logger_name = f"{self.service}.{_get_caller_filename()}" return logging.getLogger(logger_name) - def _init_logger(self, formatter_options: Optional[Dict] = None, **kwargs): + def _init_logger(self, formatter_options: Optional[Dict] = None, log_level: Union[str, int, None] = None, **kwargs): """Configures new logger""" # Skip configuration if it's a child logger or a pre-configured logger @@ -293,13 +287,13 @@ def _init_logger(self, formatter_options: Optional[Dict] = None, **kwargs): if self.child or is_logger_preconfigured: return + self._logger.setLevel(self._determine_log_level(log_level)) self._configure_sampling() - self._logger.setLevel(self.log_level) self._logger.addHandler(self.logger_handler) self.structure_logs(formatter_options=formatter_options, **kwargs) # Maintenance: We can drop this upon Py3.7 EOL. It's a backport for "location" key to work - self._logger.findCaller = self.findCaller + self._logger.findCaller = compat.findCaller # Pytest Live Log feature duplicates log records for colored output # but we explicitly add a filter for log deduplication. @@ -329,7 +323,7 @@ def _configure_sampling(self): try: if self.sampling_rate and random.random() <= float(self.sampling_rate): logger.debug("Setting log level to Debug due to sampling rate") - self.setLevel(logging.DEBUG) + self._logger.setLevel(logging.DEBUG) except ValueError: raise InvalidLoggerSamplingRateError( f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead." @@ -445,19 +439,6 @@ def decorate(event, context, *args, **kwargs): return decorate - def setLevel(self, level: Union[str, int]): - """ - Set the logging level for the logger. - - Parameters: - ----------- - level str | int - The level to set. Can be a string representing the level name: 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL' - or an integer representing the level value: 10 for 'DEBUG', 20 for 'INFO', 30 for 'WARNING', 40 for 'ERROR', 50 for 'CRITICAL'. 
# noqa: E501 - """ - self.log_level = level - self._logger.setLevel(level) - def info( self, msg: object, @@ -584,17 +565,6 @@ def append_keys(self, **additional_keys): def remove_keys(self, keys: Iterable[str]): self.registered_formatter.remove_keys(keys) - @property - def registered_handler(self) -> logging.Handler: - """Convenience property to access logger handler""" - handlers = self._logger.parent.handlers if self.child else self._logger.handlers - return handlers[0] - - @property - def registered_formatter(self) -> BasePowertoolsFormatter: - """Convenience property to access logger formatter""" - return self.registered_handler.formatter # type: ignore - def structure_logs(self, append: bool = False, formatter_options: Optional[Dict] = None, **keys): """Sets logging formatting to JSON. @@ -663,8 +633,38 @@ def get_correlation_id(self) -> Optional[str]: return self.registered_formatter.log_format.get("correlation_id") return None + @property + def registered_handler(self) -> logging.Handler: + """Convenience property to access the first logger handler""" + handlers = self._logger.parent.handlers if self.child else self._logger.handlers + return handlers[0] + + @property + def registered_formatter(self) -> BasePowertoolsFormatter: + """Convenience property to access the first logger formatter""" + return self.registered_handler.formatter # type: ignore[return-value] + + @property + def log_level(self) -> int: + return self._logger.level + + @property + def name(self) -> str: + return self._logger.name + + @property + def handlers(self) -> List[logging.Handler]: + """List of registered logging handlers + + Notes + ----- + + Looking for the first configured handler? Use registered_handler property instead. + """ + return self._logger.handlers + @staticmethod - def _get_log_level(level: Union[str, int, None]) -> Union[str, int]: + def _determine_log_level(level: Union[str, int, None]) -> Union[str, int]: """Returns preferred log level set by the customer in upper case""" if isinstance(level, int): return level @@ -675,51 +675,6 @@ def _get_log_level(level: Union[str, int, None]) -> Union[str, int]: return log_level.upper() - @staticmethod - def _get_caller_filename(): - """Return caller filename by finding the caller frame""" - # Current frame => _get_logger() - # Previous frame => logger.py - # Before previous frame => Caller - frame = inspect.currentframe() - caller_frame = frame.f_back.f_back.f_back - return caller_frame.f_globals["__name__"] - - # Maintenance: We can drop this upon Py3.7 EOL. It's a backport for "location" key to work - def findCaller(self, stack_info=False, stacklevel=2): # pragma: no cover - """ - Find the stack frame of the caller so that we can note the source - file name, line number and function name. - """ - f = logging.currentframe() # noqa: VNE001 - # On some versions of IronPython, currentframe() returns None if - # IronPython isn't run with -X:Frames. - if f is None: - return "(unknown file)", 0, "(unknown function)", None - while stacklevel > 0: - next_f = f.f_back - if next_f is None: - ## We've got options here. 
- ## If we want to use the last (deepest) frame: - break - ## If we want to mimic the warnings module: - # return ("sys", 1, "(unknown function)", None) # noqa: E800 - ## If we want to be pedantic: # noqa: E800 - # raise ValueError("call stack is not deep enough") # noqa: E800 - f = next_f # noqa: VNE001 - if not _is_internal_frame(f): - stacklevel -= 1 - co = f.f_code - sinfo = None - if stack_info: - with io.StringIO() as sio: - sio.write("Stack (most recent call last):\n") - traceback.print_stack(f, file=sio) - sinfo = sio.getvalue() - if sinfo[-1] == "\n": - sinfo = sinfo[:-1] - return co.co_filename, f.f_lineno, co.co_name, sinfo - def set_package_logger( level: Union[str, int] = logging.DEBUG, @@ -760,16 +715,16 @@ def set_package_logger( logger.addHandler(handler) -# Maintenance: We can drop this upon Py3.7 EOL. It's a backport for "location" key to work -# The following is based on warnings._is_internal_frame. It makes sure that -# frames of the import mechanism are skipped when logging at module level and -# using a stacklevel value greater than one. -def _is_internal_frame(frame): # pragma: no cover - """Signal whether the frame is a CPython or logging module internal.""" - filename = os.path.normcase(frame.f_code.co_filename) - return filename == logging._srcfile or ("importlib" in filename and "_bootstrap" in filename) - - def log_uncaught_exception_hook(exc_type, exc_value, exc_traceback, logger: Logger): """Callback function for sys.excepthook to use Logger to log uncaught exceptions""" logger.exception(exc_value, exc_info=(exc_type, exc_value, exc_traceback)) # pragma: no cover + + +def _get_caller_filename(): + """Return caller filename by finding the caller frame""" + # Current frame => _get_logger() + # Previous frame => logger.py + # Before previous frame => Caller + frame = inspect.currentframe() + caller_frame = frame.f_back.f_back.f_back + return caller_frame.f_globals["__name__"] diff --git a/aws_lambda_powertools/logging/utils.py b/aws_lambda_powertools/logging/utils.py index 6a62a79055f..4066011f7f2 100644 --- a/aws_lambda_powertools/logging/utils.py +++ b/aws_lambda_powertools/logging/utils.py @@ -25,7 +25,7 @@ def copy_config_to_registered_loggers( exclude : Optional[Set[str]], optional List of logger names to exclude, by default None """ - level = log_level or source_logger.level + level = log_level or source_logger.log_level # Assumptions: Only take parent loggers not children (dot notation rule) # Steps: From 5aece716496e1ce4220b2f2dcb399eceaefc976b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Jun 2023 22:16:21 +0100 Subject: [PATCH 75/76] chore(deps-dev): bump cfn-lint from 0.77.5 to 0.77.6 (#2360) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 22 +++++++++++----------- pyproject.toml | 2 +- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9566e042ace..777650dc2a3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -189,14 +189,14 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.66.0" +version = "1.68.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.66.0.tar.gz", hash = "sha256:0b9e9684ea0384fd84f5e722f7fea61896c514b95d3403aa782b69acd485dbbf"}, - {file = 
"aws_sam_translator-1.66.0-py3-none-any.whl", hash = "sha256:dc4f38cd7ce2a4875d943bf10ba0745901a3a7b7fec1e40b8d13072641630c58"}, + {file = "aws-sam-translator-1.68.0.tar.gz", hash = "sha256:d12a7bb3909142d32458f76818cb96a5ebc5f50fbd5943301d552679a893afcc"}, + {file = "aws_sam_translator-1.68.0-py3-none-any.whl", hash = "sha256:557d8080c9e6c1c609bfe806ea9545f7ea34144e2466c0ddc801806c2c05afdc"}, ] [package.dependencies] @@ -206,7 +206,7 @@ pydantic = ">=1.8,<2.0" typing-extensions = ">=4.4,<5" [package.extras] -dev = ["black (==23.1.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "mypy (>=1.1.0,<1.2.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.261)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.1.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.1.0,<1.2.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.263)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" @@ -370,18 +370,18 @@ files = [ [[package]] name = "cfn-lint" -version = "0.77.5" +version = "0.77.6" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.77.5.tar.gz", hash = "sha256:4282d13ffe76a5dee6431b1f56e3641d87c28b1ef5be663afe7d8dbf13f28bdb"}, - {file = "cfn_lint-0.77.5-py3-none-any.whl", hash = "sha256:b5126dffb834078a71341090d49669046076c09196f0d2bdca68dbace1bf357a"}, + {file = "cfn-lint-0.77.6.tar.gz", hash = "sha256:84a07ae2f9af9577ab09a0f79684272976ba155687093d94c4b8ca9e253e07c7"}, + {file = "cfn_lint-0.77.6-py3-none-any.whl", hash = "sha256:22d891e1ac21fc1aa7ea6c02dd8739224a8b2ed27b602a84611f3837002ae239"}, ] [package.dependencies] -aws-sam-translator = ">=1.65.0" +aws-sam-translator = ">=1.68.0" jschema-to-python = ">=1.2.3,<1.3.0" jsonpatch = "*" jsonschema = ">=3.0,<5" @@ -1102,7 +1102,7 @@ files = [ name = "importlib-metadata" version = "6.6.0" description = "Read metadata from Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3065,7 +3065,7 @@ requests = ">=2.0,<3.0" name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3087,4 +3087,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "04524bd804a7a4b8343c67fb59b96bdc11ada96de02123e37bd16b3f0db82044" +content-hash = "5f0f3b40a4b7a8dee3008f7739c488a10102751142042c1a6ce3d3e63977d274" diff --git a/pyproject.toml b/pyproject.toml index 07fd2c28f97..254fdaf1da5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,7 +101,7 @@ all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] aws-sdk = ["boto3"] 
[tool.poetry.group.dev.dependencies] -cfn-lint = "0.77.5" +cfn-lint = "0.77.6" mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" From a4814a219317390d5e0a46199499ad4530de3ee1 Mon Sep 17 00:00:00 2001 From: "Powertools for AWS Lambda (Python) bot" <aws-lambda-powertools-feedback@amazon.com> Date: Fri, 2 Jun 2023 09:53:05 +0000 Subject: [PATCH 76/76] chore: version bump --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 254fdaf1da5..c3601fdd66a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "2.15.0" +version = "2.16.0" description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]