diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index ecd67fc20d9..38760c85de6 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -95,6 +95,8 @@ labelPRBasedOnFilePath: firstPRWelcomeComment: > Thanks a lot for your first contribution! Please check out our contributing guidelines and don't hesitate to ask whatever you need. + In the meantime, check out the #python channel on our AWS Lambda Powertools Discord: [Invite link](https://discord.gg/B8zZKbbyET) + # Comment to be posted to congratulate user on their first merged PR firstPRMergeComment: > Awesome work, congrats on your first merged pull request and thank you for helping improve everyone's experience! @@ -103,6 +105,8 @@ firstPRMergeComment: > firstIssueWelcomeComment: > Thanks for opening your first issue here! We'll come back to you as soon as we can. + In the meantime, check out the #python channel on our AWS Lambda Powertools Discord: [Invite link](https://discord.gg/B8zZKbbyET) + ###### IssueLink Adder ################################################################################################# # Insert Issue (Jira/Github etc) link in PR description based on the Issue ID in PR title. #insertIssueLinkInPrDescription: diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js new file mode 100644 index 00000000000..c17199faf76 --- /dev/null +++ b/.github/scripts/comment_on_large_pr.js @@ -0,0 +1,73 @@ +const { + PR_NUMBER, + PR_ACTION, + PR_AUTHOR, + IGNORE_AUTHORS, +} = require("./constants") + + +/** + * Notify PR author to split XXL PR in smaller chunks + * + * @param {object} core - core functions instance from @actions/core + * @param {object} gh_client - Pre-authenticated REST client (Octokit) + * @param {string} owner - GitHub Organization + * @param {string} repository - GitHub repository + */ +const notifyAuthor = async ({ + core, + gh_client, + owner, + repository, +}) => { + core.info(`Commenting on PR ${PR_NUMBER}`) + + let msg = `### ⚠️Large PR detected⚠️ + +Please consider breaking into smaller PRs to avoid significant review delays. Ignore if this PR has naturally grown to this size after reviews. 
+ `; + + try { + await gh_client.rest.issues.createComment({ + owner: owner, + repo: repository, + body: msg, + issue_number: PR_NUMBER, + }); + } catch (error) { + core.setFailed("Failed to notify PR author to split large PR"); + console.error(err); + } +} + +module.exports = async ({github, context, core}) => { + if (IGNORE_AUTHORS.includes(PR_AUTHOR)) { + return core.notice("Author in IGNORE_AUTHORS list; skipping...") + } + + if (PR_ACTION != "labeled") { + return core.notice("Only run on PRs labeling actions; skipping") + } + + + /** @type {string[]} */ + const { data: labels } = await github.rest.issues.listLabelsOnIssue({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: PR_NUMBER, + }) + + // Schema: https://docs.github.com/en/rest/issues/labels#list-labels-for-an-issue + for (const label of labels) { + core.info(`Label: ${label}`) + if (label.name == "size/XXL") { + await notifyAuthor({ + core: core, + gh_client: github, + owner: context.repo.owner, + repository: context.repo.repo, + }) + break; + } + } +} diff --git a/.github/workflows/build_changelog.yml b/.github/workflows/build_changelog.yml index f0501083048..3cd6fffe855 100644 --- a/.github/workflows/build_changelog.yml +++ b/.github/workflows/build_changelog.yml @@ -6,5 +6,4 @@ on: jobs: changelog: - needs: release uses: ./.github/workflows/reusable_publish_changelog.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index edd7e67bf91..f8a7849e7ea 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -2,7 +2,7 @@ name: "CodeQL" on: push: - branches: [develop] + branches: [develop, v2] jobs: analyze: @@ -14,23 +14,23 @@ jobs: matrix: # Override automatic language detection by changing the below list # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] - language: ['python'] + language: ["python"] # Learn more... # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout repository + uses: actions/checkout@v3 - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ # queries: ./path/to/local/query, your-org/your-repo/queries@main - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml new file mode 100644 index 00000000000..e9180d8010a --- /dev/null +++ b/.github/workflows/on_label_added.yml @@ -0,0 +1,38 @@ +name: On Label added + +on: + workflow_run: + workflows: ["Record PR details"] + types: + - completed + +jobs: + get_pr_details: + if: ${{ github.event.workflow_run.conclusion == 'success' }} + uses: ./.github/workflows/reusable_export_pr_details.yml + with: + record_pr_workflow_id: ${{ github.event.workflow_run.id }} + workflow_origin: ${{ github.event.repository.full_name }} + secrets: + token: ${{ secrets.GITHUB_TOKEN }} + + split-large-pr: + needs: get_pr_details + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/checkout@v3 + # Maintenance: Persist state per PR as an artifact to avoid spam on label add + - name: "Suggest split large Pull Request" + uses: actions/github-script@v6 + env: + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} + PR_ACTION: ${{ needs.get_pr_details.outputs.prAction }} + PR_AUTHOR: ${{ needs.get_pr_details.outputs.prAuthor }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const script = require('.github/scripts/comment_on_large_pr.js'); + await script({github, context, core}); diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index 2663d605325..6c5979c8b80 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -20,8 +20,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: "Debug workflow_run event" - run: echo "${{ github }}" - name: "Ensure related issue is present" uses: actions/github-script@v6 env: diff --git a/.github/workflows/on_push_docs.yml b/.github/workflows/on_push_docs.yml new file mode 100644 index 00000000000..d46879ca6b1 --- /dev/null +++ b/.github/workflows/on_push_docs.yml @@ -0,0 +1,35 @@ +name: Docs + +on: + push: + branches: + - develop + paths: + - "docs/**" + - "mkdocs.yml" + - "examples/**" + +jobs: + changelog: + permissions: + contents: write + uses: ./.github/workflows/reusable_publish_changelog.yml + + release-docs: + needs: changelog + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: develop + alias: stage +# Maintenance: Only necessary in repo migration +# - name: Create redirect from old docs +# run: | +# git checkout gh-pages +# test -f 404.html && echo "Redirect already set" && exit 0 +# git checkout develop -- 404.html +# git add 404.html +# git commit -m "chore: set docs redirect" --no-verify +# git push origin gh-pages -f diff --git a/.github/workflows/on_release_notes.yml b/.github/workflows/on_release_notes.yml index 563d1fefc79..2b431defff0 100644 --- a/.github/workflows/on_release_notes.yml +++ b/.github/workflows/on_release_notes.yml @@ -69,8 +69,8 @@ jobs: id: release_version # transform tag format `v> $GITHUB_ENV + RELEASE_VERSION="${RELEASE_TAG_VERSION:1}" + echo "RELEASE_VERSION=${RELEASE_VERSION}" >> "$GITHUB_ENV" echo "::set-output name=RELEASE_VERSION::${RELEASE_VERSION}" - name: Install dependencies run: make dev @@ -78,7 +78,7 @@ jobs: if: ${{ !inputs.skip_code_quality }} run: make pr - name: Bump package version - run: poetry version 
${RELEASE_VERSION} + run: poetry version "${RELEASE_VERSION}" - name: Build python package and wheel if: ${{ !inputs.skip_pypi }} run: poetry build @@ -101,7 +101,7 @@ jobs: role-to-assume: ${{ secrets.AWS_SAR_ROLE_ARN }} - name: publish lambda layer in SAR by triggering the internal codepipeline run: | - aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_VERSION --overwrite + aws ssm put-parameter --name "powertools-python-release-version" --value "$RELEASE_VERSION" --overwrite aws codepipeline start-pipeline-execution --name ${{ secrets.AWS_SAR_PIPELINE_NAME }} changelog: @@ -115,47 +115,11 @@ jobs: permissions: contents: write pages: write - runs-on: ubuntu-latest - env: - RELEASE_VERSION: ${{ needs.release.outputs.RELEASE_VERSION }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Git client setup and refresh tip - run: | - git config user.name "Release bot" - git config user.email "aws-devax-open-source@amazon.com" - git config pull.rebase true - git config remote.origin.url >&- || git remote add origin https://github.com/$origin # Git Detached mode (release notes) doesn't have origin - git pull origin $BRANCH - - name: Install poetry - run: pipx install poetry - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "poetry" - - name: Install dependencies - run: make dev - - name: Build docs website and API reference - run: | - make release-docs VERSION=${RELEASE_VERSION} ALIAS="latest" - poetry run mike set-default --push latest - - name: Release API docs to release version - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: ${{ env.RELEASE_VERSION }}/api - - name: Release API docs to latest - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: latest/api + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: ${{ needs.release.outputs.RELEASE_VERSION }} + alias: latest + detached_mode: true post_release: needs: release diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 901c593ebce..29ec0afaad5 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -10,6 +10,7 @@ on: - "mypy.ini" branches: - develop + - v2 push: paths: - "aws_lambda_powertools/**" @@ -19,6 +20,7 @@ on: - "mypy.ini" branches: - develop + - v2 jobs: build: @@ -28,7 +30,6 @@ jobs: matrix: python-version: [3.7, 3.8, 3.9] env: - OS: ${{ matrix.os }} PYTHON: ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 @@ -55,7 +56,5 @@ jobs: uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # 3.1.0 with: file: ./coverage.xml - # flags: unittests - env_vars: OS,PYTHON + env_vars: PYTHON name: aws-lambda-powertools-python-codecov - # fail_ci_if_error: true # failing more consistently making CI unreliable despite all tests above passing diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml deleted file mode 100644 index 75f34bdfcbe..00000000000 --- a/.github/workflows/python_docs.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: Docs - -# Maintenance: Create a reusable workflow to be more easily reused across release, push, and doc hot fixes -# this should include inputs on whether to release API docs, what version 
to release, and whether to rebuild /latest - -on: - push: - branches: - - develop - paths: - - "docs/**" - - "mkdocs.yml" - - "examples/**" - workflow_dispatch: - -jobs: - changelog: - permissions: - contents: write - uses: ./.github/workflows/reusable_publish_changelog.yml - - docs: - needs: changelog - # Force Github action to run only a single job at a time (based on the group name) - # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` - concurrency: - group: on-docs-build - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Install poetry - run: pipx install poetry - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "poetry" - # Maintenance: temporarily until we drop Python 3.6 and make cfn-lint a dev dependency - - name: Setup Cloud Formation Linter with Latest Version - uses: scottbrenner/cfn-lint-action@ee9ee62016ef62c5fd366e6be920df4b310ed353 # v2.2.4 - - name: Install dependencies - run: make dev - - name: Lint documentation - run: | - make lint-docs - cfn-lint examples/**/*.yaml - - name: Setup doc deploy - run: | - git config --global user.name Docs deploy - git config --global user.email docs@dummy.bot.com - git config pull.rebase true - git pull --rebase - - name: Build docs website and API reference - run: make release-docs VERSION="develop" ALIAS="stage" - - name: Deploy all docs - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: develop/api - - name: Create redirect from old docs - run: | - git checkout gh-pages - test -f 404.html && echo "Redirect already set" && exit 0 - git checkout develop -- 404.html - git add 404.html - git commit -m "chore: set docs redirect" --no-verify - git push origin gh-pages -f diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index bd9cffe8feb..eb995d95a12 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -5,14 +5,13 @@ name: Rebuild latest docs # # 1. Trigger "Rebuild latest docs" workflow manually: https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow # 2. Use the latest version released under Releases e.g. v1.22.0 -# 3. Set `Build and publish docs only` field to `true` on: workflow_dispatch: inputs: latest_published_version: description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v1.26.7" - default: "v1.26.7" + default: "v1.28.0" required: true jobs: @@ -21,49 +20,12 @@ jobs: contents: write uses: ./.github/workflows/reusable_publish_changelog.yml - release: + release-docs: needs: changelog - # Force Github action to run only a single job at a time (based on the group name) - # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` - concurrency: - group: on-docs-rebuild - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Install poetry - run: pipx install poetry - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "poetry" - - name: Set release notes tag - run: | - RELEASE_TAG_VERSION=${{ github.event.inputs.latest_published_version }} - echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV - - name: Install dependencies - run: make dev - - name: Setup doc deploy - run: | - git config --global user.name Docs deploy - git config --global user.email aws-devax-open-source@amazon.com - - name: Build docs website and API reference - run: | - make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" - poetry run mike set-default --push latest - - name: Release API docs to release version - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api - - name: Release API docs to latest - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: latest/api + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: ${{ inputs.latest_published_version }} + alias: latest diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml index 44f445a70ac..b1638ad8865 100644 --- a/.github/workflows/record_pr.yml +++ b/.github/workflows/record_pr.yml @@ -2,7 +2,7 @@ name: Record PR details on: pull_request: - types: [opened, edited, closed] + types: [opened, edited, closed, labeled] jobs: record_pr: diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml index e1190e19873..20d69b9c814 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -8,12 +8,15 @@ on: workflow_call: inputs: stage: + description: "Deployment stage (BETA, PROD)" required: true type: string artefact-name: + description: "CDK Layer Artefact name to download" required: true type: string environment: + description: "GitHub Environment to use for encrypted secrets" required: true type: string diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml index 86c3e7d645e..ec168b95f20 100644 --- a/.github/workflows/reusable_export_pr_details.yml +++ b/.github/workflows/reusable_export_pr_details.yml @@ -4,13 +4,16 @@ on: workflow_call: inputs: record_pr_workflow_id: + description: "Record PR workflow execution ID to download PR details" required: true type: number workflow_origin: # see https://github.com/awslabs/aws-lambda-powertools-python/issues/1349 + description: "Repository full name for runner integrity" required: true type: string secrets: token: + description: "GitHub Actions temporary and scoped token" required: true # 
Map the workflow outputs to job outputs outputs: diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml index 2cb786ed86a..30e49b98ae5 100644 --- a/.github/workflows/reusable_publish_changelog.yml +++ b/.github/workflows/reusable_publish_changelog.yml @@ -26,13 +26,15 @@ jobs: git config user.name "Release bot" git config user.email "aws-devax-open-source@amazon.com" git config pull.rebase true - git config remote.origin.url >&- || git remote add origin https://github.com/$origin # Git Detached mode (release notes) doesn't have origin - git pull origin $BRANCH + git config remote.origin.url >&- || git remote add origin https://github.com/"${origin}" # Git Detached mode (release notes) doesn't have origin + git pull origin "${BRANCH}" - name: "Generate latest changelog" run: make changelog - name: Update Changelog in trunk run: | + HAS_CHANGE=$(git status --porcelain) + test -z "${HAS_CHANGE}" && echo "Nothing to update" && exit 0 git add CHANGELOG.md git commit -m "update changelog with latest changes" - git pull origin $BRANCH # prevents concurrent branch update failing push - git push origin HEAD:refs/heads/$BRANCH + git pull origin "${BRANCH}" # prevents concurrent branch update failing push + git push origin HEAD:refs/heads/"${BRANCH}" diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml new file mode 100644 index 00000000000..a157783abf4 --- /dev/null +++ b/.github/workflows/reusable_publish_docs.yml @@ -0,0 +1,83 @@ +name: Reusable publish documentation + +env: + BRANCH: develop + ORIGIN: awslabs/aws-lambda-powertools-python + +on: + workflow_call: + inputs: + version: + description: "Version to build and publish docs (v1.28.0, develop)" + required: true + type: string + alias: + description: "Alias to associate version (latest, stage)" + required: true + type: string + detached_mode: + description: "Whether it's running in git detached mode to ensure git is sync'd" + required: false + default: false + type: boolean + +permissions: + contents: write + pages: write + +jobs: + publish_docs: + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` + concurrency: + group: on-docs-rebuild + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Install poetry + run: pipx install poetry + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + cache: "poetry" + - name: Install dependencies + run: make dev + - name: Git client setup + run: | + git config --global user.name Docs deploy + git config --global user.email aws-devax-open-source@amazon.com + - name: Git refresh tip (detached mode) + # Git Detached mode (release notes) doesn't have origin + if: ${{ inputs.detached_mode }} + run: | + git config pull.rebase true + git config remote.origin.url >&- || git remote add origin https://github.com/"$ORIGIN" + git pull origin "$BRANCH" + - name: Build docs website and API reference + env: + VERSION: ${{ inputs.version }} + ALIAS: ${{ inputs.alias }} + run: | + make release-docs VERSION="$VERSION" ALIAS="$ALIAS" + poetry run mike set-default --push latest + # Maintenance: Migrate to new gh-pages action + - name: Release API docs + uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 + env: + VERSION: ${{ inputs.version }} + with: + github_token: ${{ secrets.GITHUB_TOKEN 
}} + publish_dir: ./api + keep_files: true + destination_dir: ${{ env.VERSION }}/api + - name: Release API docs to latest + if: ${{ inputs.alias == 'latest' }} + uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: latest/api diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 5b1372d54aa..7786903e218 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -4,7 +4,14 @@ on: workflow_dispatch: push: - branches: [develop] + branches: [develop, v2] + paths: + - "aws_lambda_powertools/**" + - "tests/e2e/**" + - "pyproject.toml" + - "poetry.lock" + - "mypy.ini" + - "parallel_run_e2e.py" # Maintenance: Add support for triggering on `run-e2e` label # and enforce repo origin to prevent abuse @@ -21,8 +28,8 @@ jobs: strategy: matrix: # Maintenance: disabled until we discover concurrency lock issue with multiple versions and tmp - version: ["3.7", "3.8", "3.9"] - # version: ["3.7"] + # version: ["3.7", "3.8", "3.9"] + version: ["3.7"] steps: - name: "Checkout" uses: actions/checkout@v3 diff --git a/.github/workflows/v2_on_push_docs.yml b/.github/workflows/v2_on_push_docs.yml new file mode 100644 index 00000000000..d70fedbc6c5 --- /dev/null +++ b/.github/workflows/v2_on_push_docs.yml @@ -0,0 +1,36 @@ +name: Docs v2 + +on: + workflow_dispatch: +# push: +# branches: +# - v2 +# paths: +# - "docs/**" +# - "mkdocs.yml" +# - "examples/**" + +jobs: + changelog: + permissions: + contents: write + uses: ./.github/workflows/reusable_publish_changelog.yml + + release-docs: + needs: changelog + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: v2 + alias: alpha +# Maintenance: Only necessary in repo migration +# - name: Create redirect from old docs +# run: | +# git checkout gh-pages +# test -f 404.html && echo "Redirect already set" && exit 0 +# git checkout develop -- 404.html +# git add 404.html +# git commit -m "chore: set docs redirect" --no-verify +# git push origin gh-pages -f diff --git a/.github/workflows/v2_rebuild_latest_docs.yml b/.github/workflows/v2_rebuild_latest_docs.yml new file mode 100644 index 00000000000..6d833cc3fef --- /dev/null +++ b/.github/workflows/v2_rebuild_latest_docs.yml @@ -0,0 +1,14 @@ +name: V2 Rebuild latest docs + +on: + workflow_dispatch: + +jobs: + release-docs: + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: v2 + alias: alpha diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a41e0d945c..71b1125cf54 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,3 +39,8 @@ repos: hooks: - id: cfn-python-lint files: examples/.*\.(yaml|yml)$ + - repo: https://github.com/rhysd/actionlint + rev: v1.6.16 + hooks: + - id: actionlint-docker + args: [-pyflakes=] diff --git a/CHANGELOG.md b/CHANGELOG.md index 20c156aec8e..ea3284645b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,120 @@ ## Bug Fixes +* **ci:** pass core fns to large pr workflow script +* **ci:** on_label permissioning model & workflow execution +* **ci:** ensure PR_AUTHOR is present for large_pr_split workflow +* **ci:** gracefully and successful exit changelog upon no changes +* **ci:** event resolution for on_label_added workflow +* **core:** fixes leftovers from rebase + +## Documentation + +* **layer:** upgrade to 
1.28.0 (v33) + +## Features + +* **data-classes:** add KafkaEvent and KafkaEventRecord ([#1485](https://github.com/awslabs/aws-lambda-powertools-python/issues/1485)) +* **event_sources:** add CloudWatch dashboard custom widget event ([#1474](https://github.com/awslabs/aws-lambda-powertools-python/issues/1474)) +* **parser:** add KafkaMskEventModel and KafkaSelfManagedEventModel ([#1499](https://github.com/awslabs/aws-lambda-powertools-python/issues/1499)) + +## Maintenance + +* **ci:** add note for state persistence on comment_large_pr +* **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) +* **ci:** enable ci checks for v2 +* **ci:** sync package version with pypi +* **ci:** record pr details upon labeling +* **ci:** create adhoc docs workflow for v2 +* **ci:** create adhoc docs workflow for v2 +* **ci:** destructure assignment on comment_large_pr +* **ci:** create docs workflow for v2 +* **ci:** create reusable docs publishing workflow ([#1482](https://github.com/awslabs/aws-lambda-powertools-python/issues/1482)) +* **ci:** format comment on comment_large_pr script +* **ci:** disable v2 docs +* **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) +* **deps-dev:** bump mkdocs-material from 8.4.1 to 8.4.2 ([#1483](https://github.com/awslabs/aws-lambda-powertools-python/issues/1483)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) +* **deps-dev:** revert to v1.28.0 dependencies +* **maintenance:** add discord link to first PR and first issue ([#1493](https://github.com/awslabs/aws-lambda-powertools-python/issues/1493)) + + + +## [v1.29.0] - 2022-09-13 +## Bug Fixes + +* **ci:** pass core fns to large pr workflow script +* **ci:** on_label permissioning model & workflow execution +* **ci:** ensure PR_AUTHOR is present for large_pr_split workflow +* **ci:** gracefully and successful exit changelog upon no changes +* **ci:** event resolution for on_label_added workflow +* **event_handler:** fix bug with previous array implementation + +## Code Refactoring + +* **batch:** remove legacy sqs_batch_processor ([#1492](https://github.com/awslabs/aws-lambda-powertools-python/issues/1492)) + +## Documentation + +* **homepage:** note about v2 version +* **layer:** upgrade to 1.28.0 (v33) + +## Features + +* **ci:** add actionlint in pre-commit hook +* **data-classes:** add KafkaEvent and KafkaEventRecord ([#1485](https://github.com/awslabs/aws-lambda-powertools-python/issues/1485)) +* **event_handler:** add cookies as 1st class citizen in v2 ([#1487](https://github.com/awslabs/aws-lambda-powertools-python/issues/1487)) +* **event_handler:** improved support for headers and cookies in v2 ([#1455](https://github.com/awslabs/aws-lambda-powertools-python/issues/1455)) +* **event_sources:** add CloudWatch dashboard custom widget event ([#1474](https://github.com/awslabs/aws-lambda-powertools-python/issues/1474)) +* **parser:** add KafkaMskEventModel and KafkaSelfManagedEventModel ([#1499](https://github.com/awslabs/aws-lambda-powertools-python/issues/1499)) +* **tracer:** support methods with the same name (ABCs) by including fully qualified name in v2 
([#1486](https://github.com/awslabs/aws-lambda-powertools-python/issues/1486)) + +## Maintenance + +* **bandit:** update baseline +* **ci:** create adhoc docs workflow for v2 +* **ci:** create docs workflow for v2 +* **ci:** create adhoc docs workflow for v2 +* **ci:** limit E2E workflow run for source code change +* **ci:** remove unused and undeclared OS matrix env +* **ci:** add missing description fields +* **ci:** fix invalid dependency leftover +* **ci:** create reusable docs publishing workflow ([#1482](https://github.com/awslabs/aws-lambda-powertools-python/issues/1482)) +* **ci:** remove dangling debug step +* **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) +* **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) +* **ci:** enable ci checks for v2 +* **ci:** destructure assignment on comment_large_pr +* **ci:** record pr details upon labeling +* **ci:** sync package version with pypi +* **ci:** add note for state persistence on comment_large_pr +* **ci:** format comment on comment_large_pr script +* **deps:** bump pydantic from 1.10.0 to 1.10.1 ([#1491](https://github.com/awslabs/aws-lambda-powertools-python/issues/1491)) +* **deps:** bump pydantic from 1.10.1 to 1.10.2 ([#1502](https://github.com/awslabs/aws-lambda-powertools-python/issues/1502)) +* **deps:** bump aws-cdk-aws-apigatewayv2-integrations-alpha from 2.40.0a0 to 2.41.0a0 ([#1510](https://github.com/awslabs/aws-lambda-powertools-python/issues/1510)) +* **deps:** bump types-requests from 2.28.9 to 2.28.10 ([#1508](https://github.com/awslabs/aws-lambda-powertools-python/issues/1508)) +* **deps:** bump mkdocs-material from 8.4.3 to 8.4.4 ([#1513](https://github.com/awslabs/aws-lambda-powertools-python/issues/1513)) +* **deps:** bump flake8-bugbear from 22.8.23 to 22.9.11 ([#1512](https://github.com/awslabs/aws-lambda-powertools-python/issues/1512)) +* **deps-dev:** bump mkdocs-material from 8.4.1 to 8.4.2 ([#1483](https://github.com/awslabs/aws-lambda-powertools-python/issues/1483)) +* **deps-dev:** bump flake8-variables-names from 0.0.4 to 0.0.5 ([#1490](https://github.com/awslabs/aws-lambda-powertools-python/issues/1490)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) +* **deps-dev:** bump mypy-boto3-ssm from 1.24.39.post2 to 1.24.69 ([#1509](https://github.com/awslabs/aws-lambda-powertools-python/issues/1509)) +* **deps-dev:** bump aws-cdk-lib from 2.39.1 to 2.40.0 ([#1495](https://github.com/awslabs/aws-lambda-powertools-python/issues/1495)) +* **deps-dev:** bump black from 22.6.0 to 22.8.0 ([#1494](https://github.com/awslabs/aws-lambda-powertools-python/issues/1494)) +* **deps-dev:** bump pytest from 7.1.2 to 7.1.3 ([#1497](https://github.com/awslabs/aws-lambda-powertools-python/issues/1497)) +* **deps-dev:** bump mkdocs-material from 8.4.2 to 8.4.3 ([#1504](https://github.com/awslabs/aws-lambda-powertools-python/issues/1504)) +* **deps-dev:** bump aws-cdk-aws-apigatewayv2-integrations-alpha from 2.39.1a0 to 2.40.0a0 ([#1496](https://github.com/awslabs/aws-lambda-powertools-python/issues/1496)) +* **deps-dev:** bump aws-cdk-lib from 2.40.0 to 2.41.0 
([#1507](https://github.com/awslabs/aws-lambda-powertools-python/issues/1507)) +* **maintainers:** update release workflow link +* **maintenance:** add discord link to first PR and first issue ([#1493](https://github.com/awslabs/aws-lambda-powertools-python/issues/1493)) + + + +## [v1.28.0] - 2022-08-25 +## Bug Fixes + * **ci:** calculate parallel jobs based on infrastructure needs ([#1475](https://github.com/awslabs/aws-lambda-powertools-python/issues/1475)) * **ci:** del flake8 direct dep over py3.6 conflicts and docs failure * **ci:** move from pip-tools to poetry on layers reusable workflow @@ -61,6 +175,9 @@ * **tests:** refactor E2E logger to ease maintenance, writing tests and parallelization ([#1460](https://github.com/awslabs/aws-lambda-powertools-python/issues/1460)) * **tests:** refactor E2E tracer to ease maintenance, writing tests and parallelization ([#1457](https://github.com/awslabs/aws-lambda-powertools-python/issues/1457)) +## Reverts +* fix(ci): add explicit origin to fix release detached head + ## [v1.27.0] - 2022-08-05 @@ -2256,7 +2373,9 @@ * Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38 -[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.27.0...HEAD +[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.29.0...HEAD +[v1.29.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.28.0...v1.29.0 +[v1.28.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.27.0...v1.28.0 [v1.27.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.7...v1.27.0 [v1.26.7]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.6...v1.26.7 [v1.26.6]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.5...v1.26.6 diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 63f2b3eb8f8..fb94090f762 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -208,7 +208,7 @@ These are some questions to keep in mind when drafting your first or future rele Once you're happy, hit `Publish release` 🎉🎉🎉. -This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/publish.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be closed and notified. +This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/on_release_notes.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be closed and notified. > TODO: Include information to verify SAR and Lambda Layers deployment; we're still finalizing Lambda Layer automated deployment in GitHub Actions - ping @am29d when in doubt. 
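For context before the source hunks that follow: a minimal, illustrative handler sketch (an assumption for readability, not part of this patch) showing how the new KafkaEvent data class introduced later in this diff could be consumed; the handler name and the printed fields are hypothetical.

    from aws_lambda_powertools.utilities.data_classes import KafkaEvent, event_source

    @event_source(data_class=KafkaEvent)
    def lambda_handler(event: KafkaEvent, context):
        # event.records iterates every record across all topic partitions
        for record in event.records:
            # record values arrive base64-encoded; json_value decodes and parses them as JSON
            payload = record.json_value
            print(record.topic, record.partition, payload)
        return {"statusCode": 200}
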
diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 903fc7e828f..ae02a659359 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -32,7 +32,7 @@ _SAFE_URI = "-._~()'!*:@,;" # https://www.ietf.org/rfc/rfc3986.txt # API GW/ALB decode non-safe URI chars; we must support them too _UNSAFE_URI = "%<> \[\]{}|^" # noqa: W605 -_NAMED_GROUP_BOUNDARY_PATTERN = fr"(?P\1[{_SAFE_URI}{_UNSAFE_URI}\\w]+)" +_NAMED_GROUP_BOUNDARY_PATTERN = rf"(?P\1[{_SAFE_URI}{_UNSAFE_URI}\\w]+)" class ProxyEventType(Enum): diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index 37621f8274a..e9bc3521125 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -1,5 +1,10 @@ +import base64 +import logging +from binascii import Error as BinAsciiError from typing import Optional, Union +logger = logging.getLogger(__name__) + def strtobool(value: str) -> bool: """Convert a string representation of truth to True or False. @@ -58,3 +63,18 @@ def resolve_env_var_choice( resolved choice as either bool or environment value """ return choice if choice is not None else env + + +def base64_decode(value: str) -> bytes: + try: + logger.debug("Decoding base64 Kafka record item before parsing") + return base64.b64decode(value) + except (BinAsciiError, TypeError): + raise ValueError("base64 decode failed") + + +def bytes_to_string(value: bytes) -> str: + try: + return value.decode("utf-8") + except (BinAsciiError, TypeError): + raise ValueError("base64 UTF-8 decode failed") diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py index b20ec504732..8ed77f9f3a3 100644 --- a/aws_lambda_powertools/utilities/data_classes/__init__.py +++ b/aws_lambda_powertools/utilities/data_classes/__init__.py @@ -5,12 +5,14 @@ from .alb_event import ALBEvent from .api_gateway_proxy_event import APIGatewayProxyEvent, APIGatewayProxyEventV2 from .appsync_resolver_event import AppSyncResolverEvent +from .cloud_watch_custom_widget_event import CloudWatchDashboardCustomWidgetEvent from .cloud_watch_logs_event import CloudWatchLogsEvent from .code_pipeline_job_event import CodePipelineJobEvent from .connect_contact_flow_event import ConnectContactFlowEvent from .dynamo_db_stream_event import DynamoDBStreamEvent from .event_bridge_event import EventBridgeEvent from .event_source import event_source +from .kafka_event import KafkaEvent from .kinesis_stream_event import KinesisStreamEvent from .lambda_function_url_event import LambdaFunctionUrlEvent from .s3_event import S3Event @@ -23,11 +25,13 @@ "APIGatewayProxyEventV2", "AppSyncResolverEvent", "ALBEvent", + "CloudWatchDashboardCustomWidgetEvent", "CloudWatchLogsEvent", "CodePipelineJobEvent", "ConnectContactFlowEvent", "DynamoDBStreamEvent", "EventBridgeEvent", + "KafkaEvent", "KinesisStreamEvent", "LambdaFunctionUrlEvent", "S3Event", diff --git a/aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py b/aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py new file mode 100644 index 00000000000..40219f944ba --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py @@ -0,0 +1,158 @@ +from typing import Any, Dict, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class 
TimeZone(DictWrapper): + @property + def label(self) -> str: + """The time range label. Either 'UTC' or 'Local'""" + return self["label"] + + @property + def offset_iso(self) -> str: + """The time range offset in the format +/-00:00""" + return self["offsetISO"] + + @property + def offset_in_minutes(self) -> int: + """The time range offset in minutes""" + return int(self["offsetInMinutes"]) + + +class TimeRange(DictWrapper): + @property + def mode(self) -> str: + """The time range mode, i.e. 'relative' or 'absolute'""" + return self["mode"] + + @property + def start(self) -> int: + """The start time within the time range""" + return self["start"] + + @property + def end(self) -> int: + """The end time within the time range""" + return self["end"] + + @property + def relative_start(self) -> Optional[int]: + """The relative start time within the time range""" + return self.get("relativeStart") + + @property + def zoom_start(self) -> Optional[int]: + """The start time within the zoomed time range""" + return (self.get("zoom") or {}).get("start") + + @property + def zoom_end(self) -> Optional[int]: + """The end time within the zoomed time range""" + return (self.get("zoom") or {}).get("end") + + +class CloudWatchWidgetContext(DictWrapper): + @property + def dashboard_name(self) -> str: + """Get dashboard name, in which the widget is used""" + return self["dashboardName"] + + @property + def widget_id(self) -> str: + """Get widget ID""" + return self["widgetId"] + + @property + def domain(self) -> str: + """AWS domain name""" + return self["domain"] + + @property + def account_id(self) -> str: + """Get AWS Account ID""" + return self["accountId"] + + @property + def locale(self) -> str: + """Get locale language""" + return self["locale"] + + @property + def timezone(self) -> TimeZone: + """Timezone information of the dashboard""" + return TimeZone(self["timezone"]) + + @property + def period(self) -> int: + """The period shown on the dashboard""" + return int(self["period"]) + + @property + def is_auto_period(self) -> bool: + """Whether auto period is enabled""" + return bool(self["isAutoPeriod"]) + + @property + def time_range(self) -> TimeRange: + """The widget time range""" + return TimeRange(self["timeRange"]) + + @property + def theme(self) -> str: + """The dashboard theme, i.e. 'light' or 'dark'""" + return self["theme"] + + @property + def link_charts(self) -> bool: + """The widget is linked to other charts""" + return bool(self["linkCharts"]) + + @property + def title(self) -> str: + """Get widget title""" + return self["title"] + + @property + def params(self) -> Dict[str, Any]: + """Get widget parameters""" + return self["params"] + + @property + def forms(self) -> Dict[str, Any]: + """Get widget form data""" + return self["forms"]["all"] + + @property + def height(self) -> int: + """Get widget height""" + return int(self["height"]) + + @property + def width(self) -> int: + """Get widget width""" + return int(self["width"]) + + +class CloudWatchDashboardCustomWidgetEvent(DictWrapper): + """CloudWatch dashboard custom widget event + + You can use a Lambda function to create a custom widget on a CloudWatch dashboard. 
+ + Documentation: + ------------- + - https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/add_custom_widget_dashboard_about.html + """ + + @property + def describe(self) -> bool: + """Display widget documentation""" + return bool(self.get("describe", False)) + + @property + def widget_context(self) -> Optional[CloudWatchWidgetContext]: + """The widget context""" + if self.get("widgetContext"): + return CloudWatchWidgetContext(self["widgetContext"]) + + return None diff --git a/aws_lambda_powertools/utilities/data_classes/kafka_event.py b/aws_lambda_powertools/utilities/data_classes/kafka_event.py new file mode 100644 index 00000000000..e52cc5d8dc1 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/kafka_event.py @@ -0,0 +1,125 @@ +import base64 +import json +from typing import Any, Dict, Iterator, List, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class KafkaEventRecord(DictWrapper): + @property + def topic(self) -> str: + """The Kafka topic.""" + return self["topic"] + + @property + def partition(self) -> str: + """The Kafka record parition.""" + return self["partition"] + + @property + def offset(self) -> str: + """The Kafka record offset.""" + return self["offset"] + + @property + def timestamp(self) -> int: + """The Kafka record timestamp.""" + return self["timestamp"] + + @property + def timestamp_type(self) -> str: + """The Kafka record timestamp type.""" + return self["timestampType"] + + @property + def key(self) -> str: + """The raw (base64 encoded) Kafka record key.""" + return self["key"] + + @property + def decoded_key(self) -> bytes: + """Decode the base64 encoded key as bytes.""" + return base64.b64decode(self.key) + + @property + def value(self) -> str: + """The raw (base64 encoded) Kafka record value.""" + return self["value"] + + @property + def decoded_value(self) -> bytes: + """Decodes the base64 encoded value as bytes.""" + return base64.b64decode(self.value) + + @property + def json_value(self) -> Any: + """Decodes the text encoded data as JSON.""" + if self._json_data is None: + self._json_data = json.loads(self.decoded_value.decode("utf-8")) + return self._json_data + + @property + def headers(self) -> List[Dict[str, List[int]]]: + """The raw Kafka record headers.""" + return self["headers"] + + @property + def decoded_headers(self) -> Dict[str, bytes]: + """Decodes the headers as a single dictionary.""" + return {k: bytes(v) for chunk in self.headers for k, v in chunk.items()} + + def get_header_value( + self, name: str, default_value: Optional[Any] = None, case_sensitive: bool = True + ) -> Optional[bytes]: + """Get a decoded header value by name.""" + if case_sensitive: + return self.decoded_headers.get(name, default_value) + name_lower = name.lower() + + return next( + # Iterate over the dict and do a case-insensitive key comparison + (value for key, value in self.decoded_headers.items() if key.lower() == name_lower), + # Default value is returned if no matches was found + default_value, + ) + + +class KafkaEvent(DictWrapper): + """Self-managed or MSK Apache Kafka event trigger + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html + - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html + """ + + @property + def event_source(self) -> str: + """The AWS service from which the Kafka event record originated.""" + return self["eventSource"] + + @property + def event_source_arn(self) -> Optional[str]: + """The AWS service ARN from which the Kafka 
event record originated, mandatory for AWS MSK.""" + return self.get("eventSourceArn") + + @property + def bootstrap_servers(self) -> str: + """The Kafka bootstrap URL.""" + return self["bootstrapServers"] + + @property + def decoded_bootstrap_servers(self) -> List[str]: + """The decoded Kafka bootstrap URL.""" + return self.bootstrap_servers.split(",") + + @property + def records(self) -> Iterator[KafkaEventRecord]: + """The Kafka records.""" + for chunk in self["records"].values(): + for record in chunk: + yield KafkaEventRecord(record) + + @property + def record(self) -> KafkaEventRecord: + """The next Kafka record.""" + return next(self.records) diff --git a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py index 7d42fd81ad6..4b0e4c943a2 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py @@ -4,6 +4,7 @@ from .cloudwatch import CloudWatchLogsEnvelope from .dynamodb import DynamoDBStreamEnvelope from .event_bridge import EventBridgeEnvelope +from .kafka import KafkaEnvelope from .kinesis import KinesisDataStreamEnvelope from .lambda_function_url import LambdaFunctionUrlEnvelope from .sns import SnsEnvelope, SnsSqsEnvelope @@ -20,5 +21,6 @@ "SnsEnvelope", "SnsSqsEnvelope", "SqsEnvelope", + "KafkaEnvelope", "BaseEnvelope", ] diff --git a/aws_lambda_powertools/utilities/parser/envelopes/kafka.py b/aws_lambda_powertools/utilities/parser/envelopes/kafka.py new file mode 100644 index 00000000000..4e6564c3ec0 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/kafka.py @@ -0,0 +1,44 @@ +import logging +from typing import Any, Dict, List, Optional, Type, Union, cast + +from ..models import KafkaMskEventModel, KafkaSelfManagedEventModel +from ..types import Model +from .base import BaseEnvelope + +logger = logging.getLogger(__name__) + + +class KafkaEnvelope(BaseEnvelope): + """Kafka event envelope to extract data within body key + The record's body parameter is a string, though it can also be a JSON encoded string. + Regardless of its type it'll be parsed into a BaseModel object. 
+ + Note: Records will be parsed the same way so if model is str, + all items in the list will be parsed as str and npt as JSON (and vice versa) + """ + + def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> List[Optional[Model]]: + """Parses data found with model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : Type[Model] + Data model provided to parse after extracting data using envelope + + Returns + ------- + List + List of records parsed with model provided + """ + event_source = cast(dict, data).get("eventSource") + model_parse_event = KafkaMskEventModel if event_source == "aws:kafka" else KafkaSelfManagedEventModel + + logger.debug(f"Parsing incoming data with Kafka event model {model_parse_event}") + parsed_envelope = model_parse_event.parse_obj(data) + logger.debug(f"Parsing Kafka event records in `value` with {model}") + ret_list = [] + for records in parsed_envelope.records.values(): + ret_list += [self._parse(data=record.value, model=model) for record in records] + return ret_list diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index 11ab6501fa9..6d403019181 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -17,6 +17,7 @@ from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel from .event_bridge import EventBridgeModel +from .kafka import KafkaBaseEventModel, KafkaMskEventModel, KafkaRecordModel, KafkaSelfManagedEventModel from .kinesis import KinesisDataStreamModel, KinesisDataStreamRecord, KinesisDataStreamRecordPayload from .lambda_function_url import LambdaFunctionUrlModel from .s3 import S3Model, S3RecordModel @@ -98,4 +99,8 @@ "APIGatewayEventRequestContext", "APIGatewayEventAuthorizer", "APIGatewayEventIdentity", + "KafkaSelfManagedEventModel", + "KafkaRecordModel", + "KafkaMskEventModel", + "KafkaBaseEventModel", ] diff --git a/aws_lambda_powertools/utilities/parser/models/kafka.py b/aws_lambda_powertools/utilities/parser/models/kafka.py new file mode 100644 index 00000000000..d4c36bf70f1 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/kafka.py @@ -0,0 +1,65 @@ +from datetime import datetime +from typing import Dict, List, Type, Union + +from pydantic import BaseModel, validator + +from aws_lambda_powertools.shared.functions import base64_decode, bytes_to_string +from aws_lambda_powertools.utilities.parser.types import Literal + +SERVERS_DELIMITER = "," + + +class KafkaRecordModel(BaseModel): + topic: str + partition: int + offset: int + timestamp: datetime + timestampType: str + key: bytes + value: Union[str, Type[BaseModel]] + headers: List[Dict[str, bytes]] + + # validators + _decode_key = validator("key", allow_reuse=True)(base64_decode) + + @validator("value", pre=True, allow_reuse=True) + def data_base64_decode(cls, value): + as_bytes = base64_decode(value) + return bytes_to_string(as_bytes) + + @validator("headers", pre=True, allow_reuse=True) + def decode_headers_list(cls, value): + for header in value: + for key, values in header.items(): + header[key] = bytes(values) + return value + + +class KafkaBaseEventModel(BaseModel): + bootstrapServers: List[str] + records: Dict[str, List[KafkaRecordModel]] + + @validator("bootstrapServers", pre=True, 
allow_reuse=True) + def split_servers(cls, value): + return None if not value else value.split(SERVERS_DELIMITER) + + +class KafkaSelfManagedEventModel(KafkaBaseEventModel): + """Self-managed Apache Kafka event trigger + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html + """ + + eventSource: Literal["aws:SelfManagedKafka"] + + +class KafkaMskEventModel(KafkaBaseEventModel): + """Fully-managed AWS Apache Kafka event trigger + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html + """ + + eventSource: Literal["aws:kafka"] + eventSourceArn: str diff --git a/bandit.baseline b/bandit.baseline index 2421c4f1e24..a3cd5484039 100644 --- a/bandit.baseline +++ b/bandit.baseline @@ -1,226 +1,1769 @@ { "errors": [], - "generated_at": "2020-05-12T08:59:59Z", + "generated_at": "2022-09-13T10:44:42Z", "metrics": { "_totals": { - "CONFIDENCE.HIGH": 1.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 1.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 1375, - "nosec": 0 + "CONFIDENCE.HIGH": 1, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12060, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 6, - "nosec": 0 - }, - "aws_lambda_powertools/helper/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 2, - "nosec": 0 - }, - "aws_lambda_powertools/helper/models.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 108, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 7, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 23, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/api_gateway.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 639, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/appsync.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 140, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/content_types.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 
0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 30, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/lambda_function_url.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 40, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/exceptions/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/logging/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 5, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 4, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/correlation_paths.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/logging/exceptions.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, "loc": 2, - "nosec": 0 + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/filters.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/formatter.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 220, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/lambda_context.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 47, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/logging/logger.py": { - "CONFIDENCE.HIGH": 1.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 1.0, 
- "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 375, - "nosec": 0 + "CONFIDENCE.HIGH": 1, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 366, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 58, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 15, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/base.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 162, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 241, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/exceptions.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 12, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/metric.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 90, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 95, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/metrics.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 82, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 170, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/middleware_factory/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, + 
"CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, "loc": 3, - "nosec": 0 + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/middleware_factory/exceptions.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 3, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 2, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/middleware_factory/factory.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 103, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 106, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/package_logger.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 5, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/cache_dict.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/constants.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 24, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/functions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 61, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/json_encoder.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 13, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/lazy_import.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 24, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/types.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + 
"CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/tracing/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 6, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 5, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/tracing/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 117, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/tracing/extensions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 11, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/tracing/tracer.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 641, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 1, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 24, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 333, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 33, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/sqs.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 204, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, 
+ "SEVERITY.UNDEFINED": 0, + "loc": 40, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/active_mq_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 100, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/alb_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 23, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 439, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 194, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync/scalar_types_utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 73, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync_authorizer_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 89, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 178, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 122, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/cloud_watch_logs_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 77, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + 
"SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 185, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 624, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/common.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 313, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/connect_contact_flow_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 128, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 237, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/event_bridge_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 53, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/event_source.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 31, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/kafka_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 99, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/kinesis_stream_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 77, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/lambda_function_url_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/rabbit_mq_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 93, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/s3_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + 
"SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 143, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/s3_object_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 256, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/ses_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 202, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/sns_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 91, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/sqs_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 111, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/appconfig.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 92, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 50, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 8, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/feature_flags.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 247, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/schema.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 216, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 7, + 
"nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 177, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/config.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 48, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 35, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/idempotency.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 128, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/persistence/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/persistence/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, "loc": 401, - "nosec": 0 + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 201, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/jmespath_utils/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 56, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 8, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 23, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/appconfig.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 151, + "nosec": 0, + "skipped_tests": 0 + }, + 
"aws_lambda_powertools/utilities/parameters/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 272, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 178, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 7, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/secrets.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 129, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/ssm.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 277, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 17, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/apigw.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 42, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 32, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + 
"CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 34, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/kafka.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 35, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/kinesis.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 35, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/lambda_function_url.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/sns.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 59, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/sqs.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 30, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 4, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 105, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/alb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/apigw.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 77, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/apigwv2.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, 
+ "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 55, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/cloudwatch.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 33, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 26, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/event_bridge.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/kafka.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 47, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/kinesis.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 31, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/lambda_function_url.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/s3.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 49, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/s3_object_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 40, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/ses.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 52, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/sns.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 36, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/sqs.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 32, + "nosec": 0, + "skipped_tests": 0 + 
}, + "aws_lambda_powertools/utilities/parser/parser.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 128, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/pydantic.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 1, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/types.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 11, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 5, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_client_context.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 18, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_client_context_mobile_client.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 22, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_cognito_identity.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_context.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 64, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 39, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/envelopes.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, 
+ "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 53, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/validator.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 168, + "nosec": 0, + "skipped_tests": 0 } }, "results": [ { - "code": "369 try:\n370 if self.sampling_rate and random.random() <= float(self.sampling_rate):\n371 logger.debug(\"Setting log level to Debug due to sampling rate\")\n", + "code": "259 try:\n260 if self.sampling_rate and random.random() <= float(self.sampling_rate):\n261 logger.debug(\"Setting log level to Debug due to sampling rate\")\n", + "col_offset": 38, "filename": "aws_lambda_powertools/logging/logger.py", "issue_confidence": "HIGH", + "issue_cwe": { + "id": 330, + "link": "https://cwe.mitre.org/data/definitions/330.html" + }, "issue_severity": "LOW", "issue_text": "Standard pseudo-random generators are not suitable for security/cryptographic purposes.", - "line_number": 370, + "line_number": 260, "line_range": [ - 370 + 260 ], - "more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b311-random", + "more_info": "https://bandit.readthedocs.io/en/1.7.4/blacklists/blacklist_calls.html#b311-random", "test_id": "B311", "test_name": "blacklist" } ] -} +} \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index 95ce2c2a707..78b76c7bed6 100644 --- a/docs/index.md +++ b/docs/index.md @@ -14,7 +14,7 @@ A suite of utilities for AWS Lambda functions to ease adopting best practices su Powertools is available in the following formats: -* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:29**](#){: .copyMe}:clipboard: +* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33**](#){: .copyMe}:clipboard: * **PyPi**: **`pip install aws-lambda-powertools`** ???+ hint "Support this project by using Lambda Layers :heart:" @@ -32,23 +32,28 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: | Region | Layer ARN | | ---------------- | -------------------------------------------------------------------------------------------------------- | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: 
.copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-west-1` | 
[arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | ??? question "Can't find our Lambda Layer for your preferred AWS region?" You can use [Serverless Application Repository (SAR)](#sar) method, our [CDK Layer Construct](https://github.com/aws-samples/cdk-lambda-powertools-python-layer){target="_blank"}, or PyPi like you normally would for any other library. @@ -62,7 +67,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:29 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:33 ``` === "Serverless framework" @@ -72,7 +77,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:29 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:33 ``` === "CDK" @@ -88,7 +93,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:29" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:33" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -137,7 +142,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:29"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -156,7 +161,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33 ❯ amplify push -y @@ -167,7 +172,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33 ? Do you want to edit the local lambda function now? No ``` @@ -175,7 +180,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Change {region} to your AWS region, e.g. 
`eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:29 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index a450c8788e4..67d821fe04f 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -68,12 +68,14 @@ Event Source | Data_class [Application Load Balancer](#application-load-balancer) | `ALBEvent` [AppSync Authorizer](#appsync-authorizer) | `AppSyncAuthorizerEvent` [AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent` +[CloudWatch Dashboard Custom Widget](#cloudwatch-dashboard-custom-widget) | `CloudWatchDashboardCustomWidgetEvent` [CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent` [CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent` [Cognito User Pool](#cognito-user-pool) | Multiple available under `cognito_user_pool_event` [Connect Contact Flow](#connect-contact-flow) | `ConnectContactFlowEvent` [DynamoDB streams](#dynamodb-streams) | `DynamoDBStreamEvent`, `DynamoDBRecordEventName` [EventBridge](#eventbridge) | `EventBridgeEvent` +[Kafka](#kafka) | `KafkaEvent` [Kinesis Data Stream](#kinesis-streams) | `KinesisStreamEvent` [Lambda Function URL](#lambda-function-url) | `LambdaFunctionUrlEvent` [Rabbit MQ](#rabbit-mq) | `RabbitMQEvent` @@ -441,6 +443,40 @@ In this example, we also use the new Logger `correlation_id` and built-in `corre } ``` +### CloudWatch Dashboard Custom Widget + +=== "app.py" + + ```python + from aws_lambda_powertools.utilities.data_classes import event_source, CloudWatchDashboardCustomWidgetEvent + + const DOCS = ` + ## Echo + A simple echo script. Anything passed in \`\`\`echo\`\`\` parameter is returned as the content of custom widget. + + ### Widget parameters + Param | Description + ---|--- + **echo** | The content to echo back + + ### Example parameters + \`\`\` yaml + echo:
+    Hello world
+ \`\`\` + ` + + @event_source(data_class=CloudWatchDashboardCustomWidgetEvent) + def lambda_handler(event: CloudWatchDashboardCustomWidgetEvent, context): + + if event.describe: + return DOCS + + # You can directly return HTML or JSON content + # Alternatively, you can return markdown that will be rendered by CloudWatch + echo = event.widget_context.params["echo"] + return { "markdown": f"# {echo}" } + ``` + ### CloudWatch Logs CloudWatch Logs events by default are compressed and base64 encoded. You can use the helper function provided to decode, @@ -817,6 +853,22 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St ``` +### Kafka + +This example is based on the AWS docs for [Amazon MSK](https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html){target="_blank"} and [self-managed Apache Kafka](https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html){target="_blank"}. + +=== "app.py" + + ```python + from aws_lambda_powertools.utilities.data_classes import event_source, KafkaEvent + + @event_source(data_class=KafkaEvent) + def lambda_handler(event: KafkaEvent, context): + for record in event.records: + do_something_with(record.decoded_key, record.json_value) + + ``` + ### Kinesis streams Kinesis events by default contain base64 encoded data. You can use the helper function to access the data either as json diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 97b005a9fb5..e97395ae56c 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -168,6 +168,8 @@ Parser comes with the following built-in models: | **APIGatewayProxyEventModel** | Lambda Event Source payload for Amazon API Gateway | | **APIGatewayProxyEventV2Model** | Lambda Event Source payload for Amazon API Gateway v2 payload | | **LambdaFunctionUrlModel** | Lambda Event Source payload for Lambda Function URL payload | +| **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload | +| **KafkaMskEventModel** | Lambda Event Source payload for AWS MSK payload | ### extending built-in models @@ -308,6 +310,7 @@ Parser comes with the following built-in envelopes, where `Model` in the return | **ApiGatewayEnvelope** | 1. Parses data using `APIGatewayProxyEventModel`.
2. Parses `body` key using your model and returns it. | `Model` | | **ApiGatewayV2Envelope** | 1. Parses data using `APIGatewayProxyEventV2Model`.
2. Parses `body` key using your model and returns it. | `Model` | | **LambdaFunctionUrlEnvelope** | 1. Parses data using `LambdaFunctionUrlModel`.
2. Parses `body` key using your model and returns it. | `Model` | +| **KafkaEnvelope** | 1. Parses data using `KafkaRecordModel`.
2. Parses `value` key using your model and returns it. | `Model` | ### Bringing your own envelope diff --git a/poetry.lock b/poetry.lock index c6696bdff76..2f1214191b4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -15,10 +15,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "aws-cdk-lib" @@ -61,30 +61,26 @@ stevedore = ">=1.20.0" [[package]] name = "black" -version = "21.12b0" +version = "22.8.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.6.2" [package.dependencies] -click = ">=7.1.2" +click = ">=8.0.0" dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0,<1" +pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = ">=0.2.6,<2.0.0" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} -typing-extensions = [ - {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, - {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, -] +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] @@ -119,6 +115,41 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.13.8)"] +[[package]] +name = "cairocffi" +version = "1.3.0" +description = "cffi-based cairo bindings for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cffi = ">=1.1.0" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest-cov", "pytest-flake8", "pytest-isort", "pytest-runner"] +xcb = ["xcffib (>=0.3.2)"] + +[[package]] +name = "CairoSVG" +version = "2.5.2" +description = "A Simple SVG Converter based on Cairo" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +cairocffi = "*" +cssselect2 = "*" +defusedxml = "*" +pillow = "*" +tinycss2 = "*" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["pytest-cov", "pytest-flake8", "pytest-isort", "pytest-runner"] + [[package]] name = "cattrs" 
version = "1.0.0" @@ -131,7 +162,7 @@ python-versions = "*" attrs = ">=17.3" [package.extras] -dev = ["pendulum", "hypothesis", "pytest", "sphinx", "coverage", "tox", "flake8", "watchdog", "wheel", "bumpversion"] +dev = ["Sphinx", "bumpversion", "coverage", "flake8", "hypothesis", "pendulum", "pytest", "tox", "watchdog", "wheel"] [[package]] name = "cattrs" @@ -148,12 +179,23 @@ typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and pyt [[package]] name = "certifi" -version = "2022.6.15" +version = "2022.6.15.1" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "2.0.12" @@ -211,6 +253,22 @@ tomli = {version = "*", optional = true, markers = "extra == \"toml\""} [package.extras] toml = ["tomli"] +[[package]] +name = "cssselect2" +version = "0.6.0" +description = "CSS selectors for Python ElementTree" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tinycss2 = "*" +webencodings = "*" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["coverage[toml]", "pytest", "pytest-cov", "pytest-flake8", "pytest-isort"] + [[package]] name = "dataclasses" version = "0.8" @@ -227,6 +285,14 @@ category = "dev" optional = false python-versions = ">=3.5" +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "dnspython" version = "2.2.1" @@ -236,8 +302,8 @@ optional = true python-versions = ">=3.6,<4.0" [package.extras] -dnssec = ["cryptography (>=2.6,<37.0)"] curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<37.0)"] doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] @@ -265,7 +331,7 @@ python-versions = "*" [[package]] name = "exceptiongroup" -version = "1.0.0rc8" +version = "1.0.0rc9" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false @@ -294,7 +360,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "filelock" @@ -324,7 +390,7 @@ pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "flake8-bugbear" -version = "22.8.23" +version = "22.9.11" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
category = "dev" optional = false @@ -388,6 +454,7 @@ python-versions = ">=3.6,<4.0" attrs = "*" eradicate = ">=2.0,<3.0" flake8 = ">=3.5,<6" +setuptools = "*" [[package]] name = "flake8-fixme" @@ -420,6 +487,9 @@ category = "dev" optional = false python-versions = "*" +[package.dependencies] +setuptools = "*" + [[package]] name = "future" version = "0.18.2" @@ -440,7 +510,7 @@ python-versions = "*" python-dateutil = ">=2.8.1" [package.extras] -dev = ["wheel", "flake8", "markdown", "twine"] +dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "gitdb" @@ -475,20 +545,20 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.12.0" +version = "4.8.3" description = "Read metadata from Python packages" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -502,8 +572,8 @@ python-versions = ">=3.6" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [[package]] name = "iniconfig" @@ -522,10 +592,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" @@ -579,7 +649,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" MarkupSafe = ">=0.9.2" [package.extras] -babel = ["babel"] +babel = ["Babel"] lingua = ["lingua"] [[package]] @@ -649,8 +719,8 @@ packaging = "*" "ruamel.yaml" = "*" [package.extras] -test = ["flake8 (>=3.0)", "coverage"] -dev = ["pypandoc (>=1.4)", "flake8 (>=3.0)", "coverage"] +dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] +test = ["coverage", "flake8 (>=3.0)"] [[package]] name = "mkdocs" @@ -690,19 +760,22 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "8.4.1" +version = "8.5.0" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] +cairosvg = ">=2.5" jinja2 = ">=3.0.2" markdown = ">=3.2" mkdocs = ">=1.3.0" 
mkdocs-material-extensions = ">=1.0.3" +pillow = ">=8.0" pygments = ">=2.12" pymdown-extensions = ">=9.4" +requests = ">=2.26" [[package]] name = "mkdocs-material-extensions" @@ -766,8 +839,8 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-dynamodb" -version = "1.24.55.post1" -description = "Type annotations for boto3.DynamoDB 1.24.55 service generated with mypy-boto3-builder 7.11.7" +version = "1.24.60" +description = "Type annotations for boto3.DynamoDB 1.24.60 service generated with mypy-boto3-builder 7.11.8" category = "dev" optional = false python-versions = ">=3.7" @@ -821,8 +894,8 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-ssm" -version = "1.24.39.post2" -description = "Type annotations for boto3.SSM 1.24.39 service generated with mypy-boto3-builder 7.10.1" +version = "1.24.69" +description = "Type annotations for boto3.SSM 1.24.69 service generated with mypy-boto3-builder 7.11.8" category = "dev" optional = false python-versions = ">=3.7" @@ -888,6 +961,18 @@ python-versions = ">= 3.6" mako = "*" markdown = ">=3.0" +[[package]] +name = "Pillow" +version = "9.2.0" +description = "Python Imaging Library (Fork)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + [[package]] name = "platformdirs" version = "2.4.0" @@ -912,8 +997,8 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] -testing = ["pytest-benchmark", "pytest"] -dev = ["tox", "pre-commit"] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "publication" @@ -947,6 +1032,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "pydantic" version = "1.9.2" @@ -1070,7 +1163,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-forked" @@ -1096,7 +1189,7 @@ python-versions = ">=3.6" pytest = ">=5.0" [package.extras] -dev = ["pre-commit", "tox", "pytest-asyncio"] +dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" @@ -1234,6 +1327,18 @@ botocore = ">=1.12.36,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +[[package]] +name = "setuptools" +version = "59.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-inline-tabs", "sphinxcontrib-towncrier"] +testing = ["flake8-2020", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "paver", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", 
"pytest-mypy", "pytest-virtualenv (>=1.2.7)", "pytest-xdist", "sphinx", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -1262,6 +1367,21 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} pbr = ">=2.0.0,<2.1.0 || >2.1.0" +[[package]] +name = "tinycss2" +version = "1.1.1" +description = "A tiny CSS parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["coverage[toml]", "pytest", "pytest-cov", "pytest-flake8", "pytest-isort"] + [[package]] name = "tomli" version = "1.2.3" @@ -1280,7 +1400,7 @@ python-versions = ">=3.6" [[package]] name = "types-requests" -version = "2.28.9" +version = "2.28.10" description = "Typing stubs for requests" category = "dev" optional = false @@ -1291,7 +1411,7 @@ types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.23" +version = "1.26.24" description = "Typing stubs for urllib3" category = "dev" optional = false @@ -1314,8 +1434,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1329,6 +1449,14 @@ python-versions = ">=3.6" [package.extras] watchmedo = ["PyYAML (>=3.10)"] +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "wrapt" version = "1.14.1" @@ -1352,15 +1480,15 @@ requests = ">=2.0,<3.0" [[package]] name = "zipp" -version = "3.8.1" +version = "3.6.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [extras] pydantic = ["pydantic", "email-validator"] @@ -1368,7 +1496,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "a5ff8f9945c42eeee596b973e484efae6bcfde17a0e37cc708cac05b635cec1f" +content-hash = "2fcce51647649bb83159d00303d6473ce13012058bb04ddf1c254e4bf6f40801" [metadata.files] atomicwrites = [ @@ -1391,8 +1519,29 @@ bandit = [ {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"}, ] black = [ - {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, - {file = 
"black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, + {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, + {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, + {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, + {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, + {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, + {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, + {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, + {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, + {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, + {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, + {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, + {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, + {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, + {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, + {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, + {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, + {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, ] boto3 = [ {file = "boto3-1.23.10-py3-none-any.whl", hash = 
"sha256:40d08614f17a69075e175c02c5d5aab69a6153fd50e40fa7057b913ac7bf40e7"}, @@ -1402,6 +1551,13 @@ botocore = [ {file = "botocore-1.26.10-py3-none-any.whl", hash = "sha256:8a4a984bf901ccefe40037da11ba2abd1ddbcb3b490a492b7f218509c99fc12f"}, {file = "botocore-1.26.10.tar.gz", hash = "sha256:5df2cf7ebe34377470172bd0bbc582cf98c5cbd02da0909a14e9e2885ab3ae9c"}, ] +cairocffi = [ + {file = "cairocffi-1.3.0.tar.gz", hash = "sha256:108a3a7cb09e203bdd8501d9baad91d786d204561bd71e9364e8b34897c47b91"}, +] +CairoSVG = [ + {file = "CairoSVG-2.5.2-py3-none-any.whl", hash = "sha256:98c276b7e4f0caf01e5c7176765c104ffa1aa1461d63b2053b04ab663cf7052b"}, + {file = "CairoSVG-2.5.2.tar.gz", hash = "sha256:b0b9929cf5dba005178d746a8036fcf0025550f498ca54db61873322384783bc"}, +] cattrs = [ {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, @@ -1409,8 +1565,74 @@ cattrs = [ {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, ] certifi = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, + {file = "certifi-2022.6.15.1-py3-none-any.whl", hash = "sha256:43dadad18a7f168740e66944e4fa82c6611848ff9056ad910f8f7a3e46ab89e0"}, + {file = "certifi-2022.6.15.1.tar.gz", hash = "sha256:cffdcd380919da6137f76633531a5817e3a9f268575c128249fb637e4f9e73fb"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = 
"cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = 
"cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] charset-normalizer = [ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, @@ -1477,6 +1699,10 @@ coverage = [ {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, ] +cssselect2 = [ + {file = "cssselect2-0.6.0-py3-none-any.whl", hash = "sha256:3a83b2a68370c69c9cd3fcb88bbfaebe9d22edeef2c22d1ff3e1ed9c7fa45ed8"}, + {file = "cssselect2-0.6.0.tar.gz", hash = "sha256:5b5d6dea81a5eb0c9ca39f116c8578dd413778060c94c1f51196371618909325"}, +] dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, @@ -1485,6 +1711,10 @@ decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, @@ -1498,8 +1728,8 @@ eradicate = [ {file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, - {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, + {file = "exceptiongroup-1.0.0rc9-py3-none-any.whl", hash = "sha256:2e3c3fc1538a094aab74fad52d6c33fc94de3dfee3ee01f187c0e0c72aec5337"}, + {file = "exceptiongroup-1.0.0rc9.tar.gz", hash = "sha256:9086a4a21ef9b31c72181c77c040a074ba0889ee56a7b289ff0afb0d97655f96"}, ] execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, @@ -1518,8 +1748,8 
@@ flake8 = [ {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-22.8.23.tar.gz", hash = "sha256:de0717d11124a082118dd08387b34fd86b2721642ec2d8e92be66cfa5ea7c445"}, - {file = "flake8_bugbear-22.8.23-py3-none-any.whl", hash = "sha256:1b0ebe0873d1cd55bf9f1588bfcb930db339018ef44a3981a26532daa9fd14a8"}, + {file = "flake8-bugbear-22.9.11.tar.gz", hash = "sha256:39236c0e97160d1ab05d9f87422173d16e925a6220b3635bfc4aee766bf8194a"}, + {file = "flake8_bugbear-22.9.11-py3-none-any.whl", hash = "sha256:e74350a4cfc670e184f3433c223b1e7378f1cf8345ded6c8f12ac1a50c5df22b"}, ] flake8-builtins = [ {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, @@ -1568,8 +1798,8 @@ idna = [ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, - {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, + {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, + {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, ] importlib-resources = [ {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, @@ -1698,8 +1928,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, ] mkdocs-material = [ - {file = "mkdocs-material-8.4.1.tar.gz", hash = "sha256:92c70f94b2e1f8a05d9e05eec1c7af9dffc516802d69222329db89503c97b4f3"}, - {file = "mkdocs_material-8.4.1-py2.py3-none-any.whl", hash = "sha256:319a6254819ce9d864ff79de48c43842fccfdebb43e4e6820eef75216f8cfb0a"}, + {file = "mkdocs-material-8.5.0.tar.gz", hash = "sha256:1b9e03b93c26db7c1b520480978024916eda73b49eb5818820cc10f4665f00fc"}, + {file = "mkdocs_material-8.5.0-py2.py3-none-any.whl", hash = "sha256:1bfd05e6e159db2c5f95821dc3c7afdc7a5a3a7acc544c3102f7acb28691f407"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, @@ -1743,8 +1973,8 @@ mypy-boto3-cloudwatch = [ {file = "mypy_boto3_cloudwatch-1.24.55-py3-none-any.whl", hash = "sha256:23faf8fdfe928f9dcce453a60b03bda69177554eb88c2d7e5240ff91b5b14388"}, ] mypy-boto3-dynamodb = [ - {file = "mypy-boto3-dynamodb-1.24.55.post1.tar.gz", hash = "sha256:c469223c15556d93d247d38c0c31ce3c08d8073ca4597158a27abc70b8d7fbee"}, - {file = "mypy_boto3_dynamodb-1.24.55.post1-py3-none-any.whl", hash = "sha256:c762975d023b356c573d58105c7bfc1b9e7ee62c1299f09784e9dede533179e1"}, + {file = "mypy-boto3-dynamodb-1.24.60.tar.gz", hash = "sha256:aa552233fa8357d99f4a1021ef65b98679e26ebc35d04c31a9d70a4db779c236"}, + {file = "mypy_boto3_dynamodb-1.24.60-py3-none-any.whl", hash = "sha256:df8e91bb25dd6e4090aef22d33504a5e9e305e45e3262d81e7223df4b6ddee5f"}, ] mypy-boto3-lambda = [ {file = "mypy-boto3-lambda-1.24.54.tar.gz", hash = "sha256:c76d28d84bdf94c8980acd85bc07f2747559ca11a990fd6785c9c2389e13aff1"}, @@ -1763,8 +1993,8 @@ 
mypy-boto3-secretsmanager = [ {file = "mypy_boto3_secretsmanager-1.24.54-py3-none-any.whl", hash = "sha256:b89c9a0ff65a8ab2c4e4d3f6e721a0477b7d0fec246ffc08e4378420eb50b4d0"}, ] mypy-boto3-ssm = [ - {file = "mypy-boto3-ssm-1.24.39.post2.tar.gz", hash = "sha256:2859bdcef110d9cc53007a7adba9c765e804b886f98d742a496bb8f7dac07308"}, - {file = "mypy_boto3_ssm-1.24.39.post2-py3-none-any.whl", hash = "sha256:bfdb434c513fbb1f3bc4b5c158ed4e7a46cb578e5eb01e818d45f4f38296ef2c"}, + {file = "mypy-boto3-ssm-1.24.69.tar.gz", hash = "sha256:e084dc97ff946ef46fb36366db5fefaf948e761ed9488f91e281485e07885ad1"}, + {file = "mypy_boto3_ssm-1.24.69-py3-none-any.whl", hash = "sha256:066af3da69da431353db4862a3d8f49ad8021be122e80b7333a6746e39d35012"}, ] mypy-boto3-xray = [ {file = "mypy-boto3-xray-1.24.36.post1.tar.gz", hash = "sha256:104f1ecf7f1f6278c582201e71a7ab64843d3a3fdc8f23295cf68788cc77e9bb"}, @@ -1790,6 +2020,66 @@ pdoc3 = [ {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, ] +Pillow = [ + {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, + {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"}, + {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, + {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, + {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:adabc0bce035467fb537ef3e5e74f2847c8af217ee0be0455d4fec8adc0462fc"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:336b9036127eab855beec9662ac3ea13a4544a523ae273cbf108b228ecac8437"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"}, + {file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"}, + {file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"}, + {file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"}, + {file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"}, + {file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"}, + {file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"}, + {file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"}, + {file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"}, + {file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"}, + {file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"}, + {file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"}, + {file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, + {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, +] platformdirs = [ {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, @@ -1813,6 +2103,10 @@ pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = 
"pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] pydantic = [ {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"}, {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"}, @@ -2039,6 +2333,10 @@ s3transfer = [ {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, ] +setuptools = [ + {file = "setuptools-59.6.0-py3-none-any.whl", hash = "sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e"}, + {file = "setuptools-59.6.0.tar.gz", hash = "sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -2051,6 +2349,10 @@ stevedore = [ {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, ] +tinycss2 = [ + {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, + {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, +] tomli = [ {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, @@ -2082,12 +2384,12 @@ typed-ast = [ {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] types-requests = [ - {file = "types-requests-2.28.9.tar.gz", hash = "sha256:feaf581bd580497a47fe845d506fa3b91b484cf706ff27774e87659837de9962"}, - {file = "types_requests-2.28.9-py3-none-any.whl", hash = "sha256:86cb66d3de2f53eac5c09adc42cf6547eefbd0c7e1210beca1ee751c35d96083"}, + {file = "types-requests-2.28.10.tar.gz", hash = "sha256:97d8f40aa1ffe1e58c3726c77d63c182daea9a72d9f1fa2cafdea756b2a19f2c"}, + {file = "types_requests-2.28.10-py3-none-any.whl", hash = "sha256:45b485725ed58752f2b23461252f1c1ad9205b884a1e35f786bb295525a3e16a"}, ] types-urllib3 = [ - {file = "types-urllib3-1.26.23.tar.gz", hash = "sha256:b78e819f0e350221d0689a5666162e467ba3910737bafda14b5c2c85e9bb1e56"}, - {file = "types_urllib3-1.26.23-py3-none-any.whl", hash = "sha256:333e675b188a1c1fd980b4b352f9e40572413a4c1ac689c23cd546e96310070a"}, + {file = "types-urllib3-1.26.24.tar.gz", hash = "sha256:a1b3aaea7dda3eb1b51699ee723aadd235488e4dc4648e030f09bc429ecff42f"}, + {file = "types_urllib3-1.26.24-py3-none-any.whl", hash = "sha256:cf7918503d02d3576e503bbfb419b0e047c4617653bba09624756ab7175e15c9"}, ] typing-extensions = [ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, @@ -2124,6 +2426,10 @@ watchdog = [ {file = "watchdog-2.1.9-py3-none-win_ia64.whl", hash = 
"sha256:ad576a565260d8f99d97f2e64b0f97a48228317095908568a9d5c786c829d428"}, {file = "watchdog-2.1.9.tar.gz", hash = "sha256:43ce20ebb36a51f21fa376f76d1d4692452b2527ccd601950d69ed36b9e21609"}, ] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, @@ -2195,6 +2501,6 @@ xenon = [ {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, ] zipp = [ - {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, - {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, ] diff --git a/pyproject.toml b/pyproject.toml index aa6bfec1c1e..5717c622545 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ email-validator = {version = "*", optional = true } # issue #1148 coverage = {extras = ["toml"], version = "^6.2"} pytest = "^7.0.1" -black = "^21.12b0" +black = "^22.8" flake8-builtins = "^1.5.3" flake8-comprehensions = "^3.7.0" flake8-debugger = "^4.0.0" @@ -69,7 +69,7 @@ mypy-boto3-xray = { version = "^1.24.0", python = ">=3.7" } types-requests = "^2.28.8" typing-extensions = { version = "^4.3.0", python = ">=3.7" } python-snappy = "^0.6.1" -mkdocs-material = { version = "^8.3.9", python = ">=3.7" } +mkdocs-material = { version = "^8.5.0", python = ">=3.7" } filelock = { version = "^3.8.0", python = ">=3.7" } [tool.poetry.extras] diff --git a/tests/events/cloudWatchDashboardEvent.json b/tests/events/cloudWatchDashboardEvent.json new file mode 100644 index 00000000000..fd2d3be62d6 --- /dev/null +++ b/tests/events/cloudWatchDashboardEvent.json @@ -0,0 +1,38 @@ +{ + "original": "param-to-widget", + "widgetContext": { + "dashboardName": "Name-of-current-dashboard", + "widgetId": "widget-16", + "domain": "https://us-east-1.console.aws.amazon.com", + "accountId": "123456789123", + "locale": "en", + "timezone": { + "label": "UTC", + "offsetISO": "+00:00", + "offsetInMinutes": 0 + }, + "period": 300, + "isAutoPeriod": true, + "timeRange": { + "mode": "relative", + "start": 1627236199729, + "end": 1627322599729, + "relativeStart": 86400012, + "zoom": { + "start": 1627276030434, + "end": 1627282956521 + } + }, + "theme": "light", + "linkCharts": true, + "title": "Tweets for Amazon website problem", + "forms": { + "all": {} + }, + "params": { + "original": "param-to-widget" + }, + "width": 588, + "height": 369 + } +} diff --git a/tests/events/kafkaEventMsk.json b/tests/events/kafkaEventMsk.json new file mode 100644 index 00000000000..5a35b89680a --- /dev/null +++ b/tests/events/kafkaEventMsk.json @@ -0,0 +1,35 @@ +{ + "eventSource":"aws:kafka", + "eventSourceArn":"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + 
"bootstrapServers":"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records":{ + "mytopic-0":[ + { + "topic":"mytopic", + "partition":0, + "offset":15, + "timestamp":1545084650987, + "timestampType":"CREATE_TIME", + "key":"cmVjb3JkS2V5", + "value":"eyJrZXkiOiJ2YWx1ZSJ9", + "headers":[ + { + "headerKey":[ + 104, + 101, + 97, + 100, + 101, + 114, + 86, + 97, + 108, + 117, + 101 + ] + } + ] + } + ] + } +} diff --git a/tests/events/kafkaEventSelfManaged.json b/tests/events/kafkaEventSelfManaged.json new file mode 100644 index 00000000000..22985dd11dd --- /dev/null +++ b/tests/events/kafkaEventSelfManaged.json @@ -0,0 +1,34 @@ +{ + "eventSource":"aws:SelfManagedKafka", + "bootstrapServers":"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records":{ + "mytopic-0":[ + { + "topic":"mytopic", + "partition":0, + "offset":15, + "timestamp":1545084650987, + "timestampType":"CREATE_TIME", + "key":"cmVjb3JkS2V5", + "value":"eyJrZXkiOiJ2YWx1ZSJ9", + "headers":[ + { + "headerKey":[ + 104, + 101, + 97, + 100, + 101, + 114, + 86, + 97, + 108, + 117, + 101 + ] + } + ] + } + ] + } +} diff --git a/tests/functional/parser/schemas.py b/tests/functional/parser/schemas.py index 79a74f8eb53..b1b66c63379 100644 --- a/tests/functional/parser/schemas.py +++ b/tests/functional/parser/schemas.py @@ -91,3 +91,7 @@ class MyApiGatewayBusiness(BaseModel): class MyALambdaFuncUrlBusiness(BaseModel): message: str username: str + + +class MyLambdaKafkaBusiness(BaseModel): + key: str diff --git a/tests/functional/parser/test_kafka.py b/tests/functional/parser/test_kafka.py new file mode 100644 index 00000000000..f764106add4 --- /dev/null +++ b/tests/functional/parser/test_kafka.py @@ -0,0 +1,93 @@ +from typing import List + +from aws_lambda_powertools.utilities.parser import envelopes, event_parser +from aws_lambda_powertools.utilities.parser.models import ( + KafkaMskEventModel, + KafkaRecordModel, + KafkaSelfManagedEventModel, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import MyLambdaKafkaBusiness +from tests.functional.utils import load_event + + +@event_parser(model=MyLambdaKafkaBusiness, envelope=envelopes.KafkaEnvelope) +def handle_lambda_kafka_with_envelope(event: List[MyLambdaKafkaBusiness], _: LambdaContext): + assert event[0].key == "value" + assert len(event) == 1 + + +@event_parser(model=KafkaSelfManagedEventModel) +def handle_kafka_event(event: KafkaSelfManagedEventModel, _: LambdaContext): + return event + + +def test_kafka_msk_event_with_envelope(): + event = load_event("kafkaEventMsk.json") + handle_lambda_kafka_with_envelope(event, LambdaContext()) + + +def test_kafka_self_managed_event_with_envelope(): + event = load_event("kafkaEventSelfManaged.json") + handle_lambda_kafka_with_envelope(event, LambdaContext()) + + +def test_self_managed_kafka_event(): + json_event = load_event("kafkaEventSelfManaged.json") + event: KafkaSelfManagedEventModel = handle_kafka_event(json_event, LambdaContext()) + assert event.eventSource == "aws:SelfManagedKafka" + bootstrap_servers = [ + "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + ] + assert event.bootstrapServers == bootstrap_servers + + records = list(event.records["mytopic-0"]) + assert len(records) == 1 + record: KafkaRecordModel = records[0] + assert 
record.topic == "mytopic" + assert record.partition == 0 + assert record.offset == 15 + assert record.timestamp is not None + convert_time = int(round(record.timestamp.timestamp() * 1000)) + assert convert_time == 1545084650987 + assert record.timestampType == "CREATE_TIME" + assert record.key == b"recordKey" + assert record.value == '{"key":"value"}' + assert len(record.headers) == 1 + assert record.headers[0]["headerKey"] == b"headerValue" + + +@event_parser(model=KafkaMskEventModel) +def handle_msk_event(event: KafkaMskEventModel, _: LambdaContext): + return event + + +def test_kafka_msk_event(): + json_event = load_event("kafkaEventMsk.json") + event: KafkaMskEventModel = handle_msk_event(json_event, LambdaContext()) + assert event.eventSource == "aws:kafka" + bootstrap_servers = [ + "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + ] + assert event.bootstrapServers == bootstrap_servers + assert ( + event.eventSourceArn + == "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" + ) + + records = list(event.records["mytopic-0"]) + assert len(records) == 1 + record: KafkaRecordModel = records[0] + assert record.topic == "mytopic" + assert record.partition == 0 + assert record.offset == 15 + assert record.timestamp is not None + convert_time = int(round(record.timestamp.timestamp() * 1000)) + assert convert_time == 1545084650987 + assert record.timestampType == "CREATE_TIME" + assert record.key == b"recordKey" + assert record.value == '{"key":"value"}' + assert len(record.headers) == 1 + assert record.headers[0]["headerKey"] == b"headerValue" diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index 8a87075d16c..dbef57162e2 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -13,9 +13,11 @@ APIGatewayProxyEvent, APIGatewayProxyEventV2, AppSyncResolverEvent, + CloudWatchDashboardCustomWidgetEvent, CloudWatchLogsEvent, CodePipelineJobEvent, EventBridgeEvent, + KafkaEvent, KinesisStreamEvent, S3Event, SESEvent, @@ -99,6 +101,44 @@ def message(self) -> str: assert DataClassSample(data1).raw_event is data1 +def test_cloud_watch_dashboard_event(): + event = CloudWatchDashboardCustomWidgetEvent(load_event("cloudWatchDashboardEvent.json")) + assert event.describe is False + assert event.widget_context.account_id == "123456789123" + assert event.widget_context.domain == "https://us-east-1.console.aws.amazon.com" + assert event.widget_context.dashboard_name == "Name-of-current-dashboard" + assert event.widget_context.widget_id == "widget-16" + assert event.widget_context.locale == "en" + assert event.widget_context.timezone.label == "UTC" + assert event.widget_context.timezone.offset_iso == "+00:00" + assert event.widget_context.timezone.offset_in_minutes == 0 + assert event.widget_context.period == 300 + assert event.widget_context.is_auto_period is True + assert event.widget_context.time_range.mode == "relative" + assert event.widget_context.time_range.start == 1627236199729 + assert event.widget_context.time_range.end == 1627322599729 + assert event.widget_context.time_range.relative_start == 86400012 + assert event.widget_context.time_range.zoom_start == 1627276030434 + assert event.widget_context.time_range.zoom_end == 1627282956521 + assert event.widget_context.theme == "light" + assert event.widget_context.link_charts is True + assert event.widget_context.title == "Tweets for 
Amazon website problem" + assert event.widget_context.forms == {} + assert event.widget_context.params == {"original": "param-to-widget"} + assert event.widget_context.width == 588 + assert event.widget_context.height == 369 + assert event.widget_context.params["original"] == "param-to-widget" + assert event["original"] == "param-to-widget" + assert event.raw_event["original"] == "param-to-widget" + + +def test_cloud_watch_dashboard_describe_event(): + event = CloudWatchDashboardCustomWidgetEvent({"describe": True}) + assert event.describe is True + assert event.widget_context is None + assert event.raw_event == {"describe": True} + + def test_cloud_watch_trigger_event(): event = CloudWatchLogsEvent(load_event("cloudWatchLogEvent.json")) @@ -1099,6 +1139,68 @@ def test_base_proxy_event_json_body_with_base64_encoded_data(): assert event.json_body == data +def test_kafka_msk_event(): + event = KafkaEvent(load_event("kafkaEventMsk.json")) + assert event.event_source == "aws:kafka" + assert ( + event.event_source_arn + == "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" + ) + + bootstrap_servers_raw = "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092" # noqa E501 + + bootstrap_servers_list = [ + "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + ] + + assert event.bootstrap_servers == bootstrap_servers_raw + assert event.decoded_bootstrap_servers == bootstrap_servers_list + + records = list(event.records) + assert len(records) == 1 + record = records[0] + assert record.topic == "mytopic" + assert record.partition == 0 + assert record.offset == 15 + assert record.timestamp == 1545084650987 + assert record.timestamp_type == "CREATE_TIME" + assert record.decoded_key == b"recordKey" + assert record.value == "eyJrZXkiOiJ2YWx1ZSJ9" + assert record.json_value == {"key": "value"} + assert record.decoded_headers == {"headerKey": b"headerValue"} + assert record.get_header_value("HeaderKey", case_sensitive=False) == b"headerValue" + + +def test_kafka_self_managed_event(): + event = KafkaEvent(load_event("kafkaEventSelfManaged.json")) + assert event.event_source == "aws:SelfManagedKafka" + + bootstrap_servers_raw = "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092" # noqa E501 + + bootstrap_servers_list = [ + "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + ] + + assert event.bootstrap_servers == bootstrap_servers_raw + assert event.decoded_bootstrap_servers == bootstrap_servers_list + + records = list(event.records) + assert len(records) == 1 + record = records[0] + assert record.topic == "mytopic" + assert record.partition == 0 + assert record.offset == 15 + assert record.timestamp == 1545084650987 + assert record.timestamp_type == "CREATE_TIME" + assert record.decoded_key == b"recordKey" + assert record.value == "eyJrZXkiOiJ2YWx1ZSJ9" + assert record.json_value == {"key": "value"} + assert record.decoded_headers == {"headerKey": b"headerValue"} + assert record.get_header_value("HeaderKey", case_sensitive=False) == b"headerValue" + + def test_kinesis_stream_event(): event = KinesisStreamEvent(load_event("kinesisStreamEvent.json"))